diff mbox

[FFmpeg-devel,v1] avfilter: add colorstats, colorrgbstats, coloryuvstats video filter

Message ID 20191227111114.32557-1-lance.lmwang@gmail.com
State New
Headers show

Commit Message

Limin Wang Dec. 27, 2019, 11:11 a.m. UTC
From: Limin Wang <lance.lmwang@gmail.com>

Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
---
 doc/filters.texi            |  74 ++++++
 libavfilter/Makefile        |   1 +
 libavfilter/allfilters.c    |   3 +
 libavfilter/vf_colorstats.c | 461 ++++++++++++++++++++++++++++++++++++
 4 files changed, 539 insertions(+)
 create mode 100644 libavfilter/vf_colorstats.c

Comments

Paul B Mahol Dec. 27, 2019, 11:35 a.m. UTC | #1
You are duplicating some functionality of signalstats filter.

On 12/27/19, lance.lmwang@gmail.com <lance.lmwang@gmail.com> wrote:
> From: Limin Wang <lance.lmwang@gmail.com>
>
> Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
> ---
>  doc/filters.texi            |  74 ++++++
>  libavfilter/Makefile        |   1 +
>  libavfilter/allfilters.c    |   3 +
>  libavfilter/vf_colorstats.c | 461 ++++++++++++++++++++++++++++++++++++
>  4 files changed, 539 insertions(+)
>  create mode 100644 libavfilter/vf_colorstats.c
>
> diff --git a/doc/filters.texi b/doc/filters.texi
> index 8c5d3a5760..81968b2c17 100644
> --- a/doc/filters.texi
> +++ b/doc/filters.texi
> @@ -7695,6 +7695,80 @@ For example to convert the input to SMPTE-240M, use
> the command:
>  colorspace=smpte240m
>  @end example
>
> +@section colorstats, colorrgbstats, coloryuvstats
> +The filter provides statistical video measurements such as mean, minimum,
> maximum and
> +standard deviation for each frame. The user can check for
> unexpected/accidental errors
> +very quickly with them.
> +
> +@var{colorrgbstats} reports the color stats for an RGB input video,
> @var{coloryuvstats}
> +for a YUV input video.
> +
> +These filters accept the following parameters:
> +@table @option
> +@item planes
> +Set which planes to filter. Default is only the first plane.
> +@end table
> +
> +By default the filter will report these metadata values if the planes
> +are processed:
> +
> +@table @option
> +@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
> +Display the minimal Y/U/V/R/G/B/A plane value contained within the input
> frame.
> +Expressed in range of [0, (1<<bitdepth)-1]
> +
> +@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, pmin.a
> +Display the minimal Y/U/V/R/G/B/A plane percentage of maximum contained
> within
> +the input frame. Expressed in range of [0, 1]
> +
> +@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
> +Display the maximum Y/U/V/R/G/B/A plane value contained within the input
> frame.
> +Expressed in range of [0, (1<<bitdepth)-1]
> +
> +@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
> +Display the maximum Y/U/V/R/G/B/A plane percentage of maximum contained
> within
> +the input frame. Expressed in range of [0, 1]
> +
> +@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
> +Display the Y/U/V/R/G/B/A plane mean value contained within the input
> frame.
> +Expressed in range of [0, (1<<bitdepth)-1]
> +
> +@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
> +Display the Y/U/V/R/G/B/A plane mean value percentage of maximum contained
> within
> +the input frame. Expressed in range of [0, 1]
> +
> +@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
> +Display the Y/U/V/R/G/B/A plane standard deviation value contained within
> the
> +input frame. Expressed in range of [0, (1<<bitdepth)-1]
> +
> +@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b, pstdev.a
> +Display the Y/U/V/R/G/B/A plane standard deviation value percentage of
> maximum contained
> +within the input frame. Expressed in range of [0, 1]
> +@end table
> +
> +@subsection Examples
> +
> +@itemize
> +@item
> +Show all YUV color stats for each frame:
> +@example
> +ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf -show_frames
> +@end example
> +
> +@item
> +Draw graph for the pmean and pstdev value of the Y plane per frame:
> +@example
> +ffplay -i example.mov -vf
> coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
> +@end example
> +
> +@item
> +Print all RGB color stats for each frame:
> +@example
> +ffplay -i example.mov -vf colorrgbstats=planes=0xf,metadata=mode=print
> +@end example
> +
> +@end itemize
> +
>  @section convolution
>
>  Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49
> elements.
> diff --git a/libavfilter/Makefile b/libavfilter/Makefile
> index 37d4eee858..a007bd32d1 100644
> --- a/libavfilter/Makefile
> +++ b/libavfilter/Makefile
> @@ -182,6 +182,7 @@ OBJS-$(CONFIG_CIESCOPE_FILTER)               +=
> vf_ciescope.o
>  OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
>  OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
>  OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      += vf_colorchannelmixer.o
> +OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
>  OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
>  OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o
> opencl.o \
>                                                  opencl/colorkey.o
> diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
> index c295f8e403..6b84a45452 100644
> --- a/libavfilter/allfilters.c
> +++ b/libavfilter/allfilters.c
> @@ -172,6 +172,9 @@ extern AVFilter ff_vf_ciescope;
>  extern AVFilter ff_vf_codecview;
>  extern AVFilter ff_vf_colorbalance;
>  extern AVFilter ff_vf_colorchannelmixer;
> +extern AVFilter ff_vf_colorstats;
> +extern AVFilter ff_vf_colorrgbstats;
> +extern AVFilter ff_vf_coloryuvstats;
>  extern AVFilter ff_vf_colorkey;
>  extern AVFilter ff_vf_colorkey_opencl;
>  extern AVFilter ff_vf_colorhold;
> diff --git a/libavfilter/vf_colorstats.c b/libavfilter/vf_colorstats.c
> new file mode 100644
> index 0000000000..7e94c572f9
> --- /dev/null
> +++ b/libavfilter/vf_colorstats.c
> @@ -0,0 +1,461 @@
> +/*
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
> USA
> + */
> +
> +#include "libavutil/imgutils.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/pixdesc.h"
> +
> +#include "avfilter.h"
> +#include "drawutils.h"
> +#include "filters.h"
> +
> +#define R 0
> +#define G 1
> +#define B 2
> +#define A 3
> +
> +typedef struct ThreadData {
> +    AVFrame *in;
> +} ThreadData;
> +
> +typedef struct ColorStatsContext {
> +    const AVClass *class;
> +
> +    ptrdiff_t width[4];
> +    ptrdiff_t height[4];
> +
> +    int planes;
> +    int step;
> +    int bitdepth;
> +    int nb_components;
> +    int thread_count;
> +    int is_16bit;
> +    int is_rgb;
> +    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
> +    uint8_t rgba_map[4];
> +    char comps[4];
> +
> +    double *mean[4];
> +    double *stdev[4];
> +
> +    int max;
> +    double *min_value[4];
> +    double *max_value[4];
> +
> +    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr, int
> nb_jobs);
> +} ColorStatsContext;
> +
> +#define OFFSET(x) offsetof(ColorStatsContext, x)
> +#define V AV_OPT_FLAG_VIDEO_PARAM
> +#define F AV_OPT_FLAG_FILTERING_PARAM
> +
> +static const AVOption options[] = {
> +    { "planes", "set planes to filter", OFFSET(planes), AV_OPT_TYPE_INT,
> {.i64=1}, 1, 0xf, V|F},
> +    {NULL}
> +};
> +
> +#define YUV_FORMATS                                                 \
> +    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,   \
> +    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,   \
> +    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,                       \
> +    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,                       \
> +    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,                       \
> +    AV_PIX_FMT_YUV420P9,                                            \
> +    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,                     \
> +    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,                     \
> +    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,                     \
> +    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,                     \
> +    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,                     \
> +    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,                     \
> +    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,                     \
> +    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,                      \
> +    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,                    \
> +    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,                    \
> +    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,                      \
> +    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,    \
> +    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
> +
> +#define RGB_FORMATS                                                 \
> +    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,                          \
> +    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,                          \
> +    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,      \
> +    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,     \
> +    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,      \
> +    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,    \
> +    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,    \
> +    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
> +
> +static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS,
> AV_PIX_FMT_NONE };
> +static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS,
> AV_PIX_FMT_NONE };
> +static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS,
> YUV_FORMATS, AV_PIX_FMT_NONE };
> +
> +static int query_formats(AVFilterContext *ctx)
> +{
> +    const ColorStatsContext *s = ctx->priv;
> +    const enum AVPixelFormat *pix_fmts = s->force_fmt == 1 ? rgb_pix_fmts :
> +                                         s->force_fmt == 2 ? yuv_pix_fmts :
> +                                         all_pix_fmts;
> +
> +    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
> +    if (!fmts_list)
> +        return AVERROR(ENOMEM);
> +    return ff_set_common_formats(ctx, fmts_list);
> +}
> +
> +#define DECLARE_STATS_PLANAR_FUNC(nbits, div)
>                       \
> +static int stats_slice_planar_##nbits(AVFilterContext *ctx, void *arg, int
> jobnr, int nb_jobs)    \
> +{
>                       \
> +    const ColorStatsContext *s = ctx->priv;
>                       \
> +    ThreadData *td = arg;
>                       \
> +    const AVFrame *in = td->in;
>                       \
> +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>                       \
> +    int32_t count[4] = { 0 };
>                       \
> +    double min_value[4] = { s->max };
>                       \
> +    double max_value[4] = { 0 };
>                       \
> +
>                       \
> +    for (int i = 0; i < s->nb_components; i++) {
>                       \
> +        const int width = s->width[i];
>                       \
> +        const int height = s->height[i];
>                       \
> +        const int slice_start = (height *  jobnr     ) / nb_jobs;
>                       \
> +        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>                       \
> +        int linesize = in->linesize[i] / div;
>                       \
> +        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] + slice_start
> * linesize;            \
> +
>                       \
> +        if (!(s->planes & (1 << i)))
>                       \
> +            continue;
>                       \
> +        for (int j = slice_start; j < slice_end; j++) {
>                       \
> +            for (int x = 0; x < width; x++) {
>                       \
> +                sum[i] += src[x];
>                       \
> +                sum2[i] += src[x] * src[x];
>                       \
> +                if (src[i] > max_value[i]) max_value[i] = src[i];
>                       \
> +                if (src[i] < min_value[i]) min_value[i] = src[i];
>                       \
> +            }
>                       \
> +            count[i] += width;
>                       \
> +            src += linesize;
>                       \
> +        }
>                       \
> +
>                       \
> +        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) / count[i];
>                       \
> +        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i] /
> count[i]) / count[i]);     \
> +        s->min_value[i][jobnr] = min_value[i];
>                       \
> +        s->max_value[i][jobnr] = max_value[i];
>                       \
> +    }
>                       \
> +
>                       \
> +    return 0;
>                       \
> +}
> +DECLARE_STATS_PLANAR_FUNC(8, 1)
> +DECLARE_STATS_PLANAR_FUNC(16, 2)
> +
> +#define DECLARE_STATS_PACKED_FUNC(nbits, div)
>                       \
> +static int stats_slice_packed_##nbits(AVFilterContext *ctx, void *arg, int
> jobnr, int nb_jobs)    \
> +{
>                       \
> +    const ColorStatsContext *s = ctx->priv;
>                       \
> +    ThreadData *td = arg;
>                       \
> +    const AVFrame *in = td->in;
>                       \
> +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>                       \
> +    double min_value[4] = { s->max };
>                       \
> +    double max_value[4] = { 0 };
>                       \
> +    int32_t count[4] = { 0 };
>                       \
> +    const int width = in->width;
>                       \
> +    const int height = in->height;
>                       \
> +    const int slice_start = (height *  jobnr     ) / nb_jobs;
>                       \
> +    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>                       \
> +    int linesize = in->linesize[0] / div;
>                       \
> +    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] + slice_start *
> linesize;                \
> +    const uint8_t ro = s->rgba_map[R];
>                       \
> +    const uint8_t go = s->rgba_map[G];
>                       \
> +    const uint8_t bo = s->rgba_map[B];
>                       \
> +    const uint8_t ao = s->rgba_map[A];
>                       \
> +
>                       \
> +    for (int y = slice_start; y < slice_end; y++) {
>                       \
> +        for (int x = 0; x < width * s->step; x += s->step) {
>                       \
> +            const int r = src[x + ro];
>                       \
> +            const int g = src[x + go];
>                       \
> +            const int b = src[x + bo];
>                       \
> +            const int a = src[x + ao];
>                       \
> +
>                       \
> +            sum[ro] += r;
>                       \
> +            sum[go] += g;
>                       \
> +            sum[bo] += b;
>                       \
> +            sum2[ro] += r * r;
>                       \
> +            sum2[go] += g * g;
>                       \
> +            sum2[bo] += b * b;
>                       \
> +
>                       \
> +            if (r > max_value[ro]) max_value[ro] = r;
>                       \
> +            if (r < min_value[ro]) min_value[ro] = r;
>                       \
> +            if (g > max_value[go]) max_value[go] = g;
>                       \
> +            if (g < min_value[go]) min_value[go] = g;
>                       \
> +            if (b > max_value[bo]) max_value[bo] = b;
>                       \
> +            if (b < min_value[bo]) min_value[bo] = b;
>                       \
> +            if (s->step == 4) {
>                       \
> +                sum2[ao] += a * a;
>                       \
> +                sum[ao] += a;
>                       \
> +                if (a > max_value[ao]) max_value[ao] = a;
>                       \
> +                if (a < min_value[ao]) min_value[ao] = a;
>                       \
> +            }
>                       \
> +        }
>                       \
> +        count[ro] += width;
>                       \
> +        count[go] += width;
>                       \
> +        count[bo] += width;
>                       \
> +        if (s->step == 4)
>                       \
> +            count[ao] += width;
>                       \
> +        src += linesize;
>                       \
> +    }
>                       \
> +
>                       \
> +    for (int p = 0; p < s->nb_components; p++) {
>                       \
> +        int ci = s->is_rgb ? s->rgba_map[p] : p;
>                       \
> +        double variance;
>                       \
> +
>                       \
> +        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) / count[ci];
>                       \
> +        variance = (sum2[ci] - sum[ci] * (double)sum[ci] / count[ci]) /
> count[ci];                \
> +        s->stdev[ci][jobnr] = sqrt(variance);
>                       \
> +        s->min_value[ci][jobnr] = min_value[ci];
>                       \
> +        s->max_value[ci][jobnr] = max_value[ci];
>                       \
> +    }
>                       \
> +
>                       \
> +    return 0;
>                       \
> +}
> +DECLARE_STATS_PACKED_FUNC(8, 1)
> +DECLARE_STATS_PACKED_FUNC(16, 2)
> +
> +static av_cold void uninit(AVFilterContext *ctx)
> +{
> +    ColorStatsContext *s = ctx->priv;
> +
> +    for (int i = 0; i < s->nb_components; i++) {
> +        av_freep(&s->mean[i]);
> +        av_freep(&s->stdev[i]);
> +        av_freep(&s->min_value[i]);
> +        av_freep(&s->max_value[i]);
> +    }
> +}
> +
> +static int config_input(AVFilterLink *inlink)
> +{
> +    AVFilterContext *ctx = inlink->dst;
> +    ColorStatsContext *s = ctx->priv;
> +    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
> +
> +    s->nb_components = desc->nb_components;
> +    s->bitdepth = desc->comp[0].depth;
> +    s->is_16bit = s->bitdepth > 8;
> +    s->step = av_get_padded_bits_per_pixel(desc) >> (3 + s->is_16bit);
> +    s->max  = 1 << s->bitdepth - 1;
> +
> +    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
> +    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
> +    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
> +    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
> +    s->comps[3] = 'a';
> +
> +    s->thread_count = FFMAX(1, FFMIN(inlink->h,
> ff_filter_get_nb_threads(ctx)));
> +    for (int i = 0; i < s->nb_components; i++) {
> +        ptrdiff_t line_size = av_image_get_linesize(inlink->format,
> inlink->w, i);
> +
> +        s->width[i] = line_size >> (s->bitdepth > 8);
> +        s->height[i] = inlink->h >> ((i == 1 || i == 2) ?
> desc->log2_chroma_h : 0);
> +
> +        s->mean[i] = av_mallocz_array(s->thread_count,
> sizeof(*s->mean[i]));
> +        s->stdev[i] = av_mallocz_array(s->thread_count,
> sizeof(*s->stdev[i]));
> +        s->max_value[i] = av_mallocz_array(s->thread_count,
> sizeof(*s->max_value[i]));
> +        s->min_value[i] = av_mallocz_array(s->thread_count,
> sizeof(*s->min_value[i]));
> +        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] ||
> !s->min_value[i])
> +            return AVERROR(ENOMEM);
> +        for (int j = 0; j < s->thread_count; j++) {
> +            s->min_value[i][j] = (1 << s->bitdepth);
> +            s->max_value[i][j] = 0;
> +        }
> +    }
> +
> +    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
> +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 :
> stats_slice_planar_16;
> +    else
> +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 :
> stats_slice_packed_16;
> +
> +    return 0;
> +}
> +
> +static void set_meta_float(AVDictionary **metadata, const char *key, char
> c, float d)
> +{
> +    char value[128];
> +    char key2[128];
> +
> +    snprintf(value, sizeof(value), "%.2f", d);
> +    if (c)
> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
> +    else
> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
> +    av_dict_set(metadata, key2, value, 0);
> +}
> +
> +static void set_meta_int(AVDictionary **metadata, const char *key, char c,
> int d)
> +{
> +    char value[128];
> +    char key2[128];
> +
> +    snprintf(value, sizeof(value), "%d", d);
> +    if (c)
> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
> +    else
> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
> +    av_dict_set(metadata, key2, value, 0);
> +}
> +
> +static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
> +{
> +    const ColorStatsContext *s = ctx->priv;
> +    double mean[4] = { 0 };
> +    double stdev[4] = { 0 };
> +    double min_value[4] = { s->max };
> +    double max_value[4] = { 0 };
> +    int cidx;
> +
> +    for (int p = 0; p < s->nb_components; p++) {
> +        cidx = s->is_rgb ? s->rgba_map[p] : p;
> +
> +        if (!(s->planes & (1 << p)))
> +            continue;
> +
> +        for (int j = 0; j < s->thread_count; j++) {
> +            mean[cidx] += s->mean[cidx][j];
> +            stdev[cidx] += s->stdev[cidx][j];
> +            if (s->min_value[cidx][j] < min_value[cidx])
> +                min_value[cidx] = s->min_value[cidx][j];
> +            if (s->max_value[cidx][j] > max_value[cidx])
> +                max_value[cidx] = s->max_value[cidx][j];
> +        }
> +        mean[cidx] = mean[cidx] / s->thread_count;
> +        stdev[cidx] = stdev[cidx] / s->thread_count;
> +
> +        set_meta_int(&in->metadata, "min", s->comps[p], min_value[cidx]);
> +        set_meta_int(&in->metadata, "max", s->comps[p], max_value[cidx]);
> +        set_meta_int(&in->metadata, "mean", s->comps[p], mean[cidx]);
> +        set_meta_int(&in->metadata, "stdev", s->comps[p], stdev[cidx]);
> +
> +        set_meta_float(&in->metadata, "pmin", s->comps[p], min_value[cidx]
> / s->max);
> +        set_meta_float(&in->metadata, "pmax", s->comps[p], max_value[cidx]
> / s->max);
> +        set_meta_float(&in->metadata, "pmean", s->comps[p], mean[cidx] /
> s->max);
> +        set_meta_float(&in->metadata, "pstdev", s->comps[p], stdev[cidx] /
> s->max);
> +    }
> +}
> +
> +static int activate(AVFilterContext *ctx)
> +{
> +    int ret;
> +    AVFilterLink *inlink = ctx->inputs[0];
> +    AVFilterLink *outlink = ctx->outputs[0];
> +    ColorStatsContext *s = ctx->priv;
> +    AVFrame *in;
> +    ThreadData td;
> +
> +    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
> +
> +    ret = ff_inlink_consume_frame(inlink, &in);
> +    if (ret < 0)
> +        return ret;
> +
> +    if (in) {
> +        td.in = in;
> +        ctx->internal->execute(ctx, s->stats_slice, &td, NULL,
> s->thread_count);
> +
> +        report_detect_result(ctx, in);
> +        return ff_filter_frame(outlink, in);
> +    }
> +
> +    FF_FILTER_FORWARD_STATUS(inlink, outlink);
> +    FF_FILTER_FORWARD_WANTED(outlink, inlink);
> +
> +    return FFERROR_NOT_READY;
> +}
> +
> +static const AVFilterPad inputs[] = {
> +    {
> +        .name         = "default",
> +        .type         = AVMEDIA_TYPE_VIDEO,
> +        .config_props = config_input,
> +    },
> +    { NULL }
> +};
> +
> +static const AVFilterPad outputs[] = {
> +    {
> +        .name          = "default",
> +        .type          = AVMEDIA_TYPE_VIDEO,
> +    },
> +    { NULL }
> +};
> +
> +#define DEFINE_COLOR_FILTER(name_, description_)                        \
> +    AVFilter ff_vf_##name_ = {                                          \
> +        .name          = #name_,                                        \
> +        .description   = NULL_IF_CONFIG_SMALL(description_),            \
> +        .priv_size     = sizeof(ColorStatsContext),                     \
> +        .priv_class    = &name_ ## _class,                              \
> +        .init          = name_##_init,                                  \
> +        .uninit        = uninit,                                        \
> +        .query_formats = query_formats,                                 \
> +        .inputs        = inputs,                                        \
> +        .outputs       = outputs,                                       \
> +        .activate      = activate,                                      \
> +        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |       \
> +                         AVFILTER_FLAG_SLICE_THREADS,                   \
> +    }
> +
> +#if CONFIG_COLORSTATS_FILTER
> +
> +#define colorstats_options options
> +AVFILTER_DEFINE_CLASS(colorstats);
> +
> +static int colorstats_init(AVFilterContext *ctx)
> +{
> +    return 0;
> +}
> +
> +DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
> +#endif
> +
> +#if CONFIG_COLORRGBSTATS_FILTER
> +
> +#define colorrgbstats_options options
> +AVFILTER_DEFINE_CLASS(colorrgbstats);
> +
> +static int colorrgbstats_init(AVFilterContext *ctx)
> +{
> +    ColorStatsContext *s = ctx->priv;
> +
> +    s->force_fmt = 1;
> +    return 0;
> +}
> +
> +DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
> +#endif
> +
> +#if CONFIG_COLORYUVSTATS_FILTER
> +
> +#define coloryuvstats_options options
> +AVFILTER_DEFINE_CLASS(coloryuvstats);
> +
> +static int coloryuvstats_init(AVFilterContext *ctx)
> +{
> +    ColorStatsContext *s = ctx->priv;
> +
> +    s->force_fmt = 2;
> +    return 0;
> +}
> +
> +DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
> +#endif
> --
> 2.21.0
>
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Limin Wang Dec. 27, 2019, 1:32 p.m. UTC | #2
On Fri, Dec 27, 2019 at 12:35:25PM +0100, Paul B Mahol wrote:
> You are duplicating some functionality of signalstats filter.
> 
Yes, I have other functions that need the mean and stdev, which are
supported in the showinfo filter (but only for 8-bit input, with no
packed-format support and no multi-threading). Also, signalstats doesn't
support RGB formats and doesn't provide stdev, and it has too many other
functions and is difficult to change, so I think it's simpler to create
a new filter for this.


> On 12/27/19, lance.lmwang@gmail.com <lance.lmwang@gmail.com> wrote:
> > From: Limin Wang <lance.lmwang@gmail.com>
> >
> > Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
> > ---
> >  doc/filters.texi            |  74 ++++++
> >  libavfilter/Makefile        |   1 +
> >  libavfilter/allfilters.c    |   3 +
> >  libavfilter/vf_colorstats.c | 461 ++++++++++++++++++++++++++++++++++++
> >  4 files changed, 539 insertions(+)
> >  create mode 100644 libavfilter/vf_colorstats.c
> >
> > diff --git a/doc/filters.texi b/doc/filters.texi
> > index 8c5d3a5760..81968b2c17 100644
> > --- a/doc/filters.texi
> > +++ b/doc/filters.texi
> > @@ -7695,6 +7695,80 @@ For example to convert the input to SMPTE-240M, use
> > the command:
> >  colorspace=smpte240m
> >  @end example
> >
> > +@section colorstats, colorrgbstats, coloryuvstats
> > +The filter provides statistical video measurements such as mean, minimum,
> > maximum and
> > +standard deviation for each frame. The user can check for
> > unexpected/accidental errors
> > +very quickly with them.
> > +
> > +@var{colorrgbstats} report the color stats for RGB input video,
> > @var{coloryuvstats}
> > +to an YUV input video.
> > +
> > +These filters accept the following parameters:
> > +@table @option
> > +@item planes
> > +Set which planes to filter. Default is only the first plane.
> > +@end table
> > +
> > +By default the filter will report these metadata values if the planes
> > +are processed:
> > +
> > +@table @option
> > +@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
> > +Display the minimal Y/U/V/R/G/B/A plane value contained within the input
> > frame.
> > +Expressed in range of [0, 1<<bitdepth-1]
> > +
> > +@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, min.a
> > +Display the minimal Y/U/V/R/G/B/A plane percentage of maximum contained
> > within
> > +the input frame. Expressed in range of [0, 1]
> > +
> > +@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
> > +Display the maximum Y/U/V/R/G/B/A plane value contained within the input
> > frame.
> > +Expressed in range of [0, 1<<bitdepth-1]
> > +
> > +@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
> > +Display the maximum Y/U/V/R/G/B/A plane percentage of maximum contained
> > within
> > +the input frame. Expressed in range of [0, 1]
> > +
> > +@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
> > +Display the Y/U/V/R/G/B/A plane mean value contained within the input
> > frame.
> > +Expressed in range of [0, 1<<bitdepth-1]
> > +
> > +@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
> > +Display the Y/U/V/R/G/B/A plane mean value percentage of maximum contained
> > within
> > +the input frame. Expressed in range of [0, 1]
> > +
> > +@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
> > +Display the Y/U/V/R/G/B/A plane standard deviation value contained within
> > the
> > +input frame. Expressed in range of [0, 1<<bitdepth-1]
> > +
> > +@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b, pstdev.a
> > +Display the Y/U/V/R/G/B/A plane standard deviation value percentage of
> > maximum contained
> > +within the input frame. Expressed in range of [0, 1]
> > +@end table
> > +
> > +@subsection Examples
> > +
> > +@itemize
> > +@item
> > +Show all YUV color stats for each frame:
> > +@example
> > +ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf -show_frames
> > +@end example
> > +
> > +@item
> > +Draw graph for the pmean and pstdev value of the Y plane per frame:
> > +@example
> > +ffplay -i example.mov -vf
> > coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
> > +@end example
> > +
> > +@item
> > +Print all RGB color stats for each frame:
> > +@example
> > +ffplay -i example.mov -vf colorrgbstats=planes=0xf,metadata=mode=print
> > +@end example
> > +
> > +@end itemize
> > +
> >  @section convolution
> >
> >  Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49
> > elements.
> > diff --git a/libavfilter/Makefile b/libavfilter/Makefile
> > index 37d4eee858..a007bd32d1 100644
> > --- a/libavfilter/Makefile
> > +++ b/libavfilter/Makefile
> > @@ -182,6 +182,7 @@ OBJS-$(CONFIG_CIESCOPE_FILTER)               +=
> > vf_ciescope.o
> >  OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
> >  OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
> >  OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      += vf_colorchannelmixer.o
> > +OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
> >  OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
> >  OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o
> > opencl.o \
> >                                                  opencl/colorkey.o
> > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
> > index c295f8e403..6b84a45452 100644
> > --- a/libavfilter/allfilters.c
> > +++ b/libavfilter/allfilters.c
> > @@ -172,6 +172,9 @@ extern AVFilter ff_vf_ciescope;
> >  extern AVFilter ff_vf_codecview;
> >  extern AVFilter ff_vf_colorbalance;
> >  extern AVFilter ff_vf_colorchannelmixer;
> > +extern AVFilter ff_vf_colorstats;
> > +extern AVFilter ff_vf_colorrgbstats;
> > +extern AVFilter ff_vf_coloryuvstats;
> >  extern AVFilter ff_vf_colorkey;
> >  extern AVFilter ff_vf_colorkey_opencl;
> >  extern AVFilter ff_vf_colorhold;
> > diff --git a/libavfilter/vf_colorstats.c b/libavfilter/vf_colorstats.c
> > new file mode 100644
> > index 0000000000..7e94c572f9
> > --- /dev/null
> > +++ b/libavfilter/vf_colorstats.c
> > @@ -0,0 +1,461 @@
> > +/*
> > + * This file is part of FFmpeg.
> > + *
> > + * FFmpeg is free software; you can redistribute it and/or
> > + * modify it under the terms of the GNU Lesser General Public
> > + * License as published by the Free Software Foundation; either
> > + * version 2.1 of the License, or (at your option) any later version.
> > + *
> > + * FFmpeg is distributed in the hope that it will be useful,
> > + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> > + * Lesser General Public License for more details.
> > + *
> > + * You should have received a copy of the GNU Lesser General Public
> > + * License along with FFmpeg; if not, write to the Free Software
> > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
> > USA
> > + */
> > +
> > +#include "libavutil/imgutils.h"
> > +#include "libavutil/opt.h"
> > +#include "libavutil/pixdesc.h"
> > +
> > +#include "avfilter.h"
> > +#include "drawutils.h"
> > +#include "filters.h"
> > +
> > +#define R 0
> > +#define G 1
> > +#define B 2
> > +#define A 3
> > +
> > +typedef struct ThreadData {
> > +    AVFrame *in;
> > +} ThreadData;
> > +
> > +typedef struct ColorStatsContext {
> > +    const AVClass *class;
> > +
> > +    ptrdiff_t width[4];
> > +    ptrdiff_t height[4];
> > +
> > +    int planes;
> > +    int step;
> > +    int bitdepth;
> > +    int nb_components;
> > +    int thread_count;
> > +    int is_16bit;
> > +    int is_rgb;
> > +    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
> > +    uint8_t rgba_map[4];
> > +    char comps[4];
> > +
> > +    double *mean[4];
> > +    double *stdev[4];
> > +
> > +    int max;
> > +    double *min_value[4];
> > +    double *max_value[4];
> > +
> > +    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr, int
> > nb_jobs);
> > +} ColorStatsContext;
> > +
> > +#define OFFSET(x) offsetof(ColorStatsContext, x)
> > +#define V AV_OPT_FLAG_VIDEO_PARAM
> > +#define F AV_OPT_FLAG_FILTERING_PARAM
> > +
> > +static const AVOption options[] = {
> > +    { "planes", "set planes to filter", OFFSET(planes), AV_OPT_TYPE_INT,
> > {.i64=1}, 1, 0xf, V|F},
> > +    {NULL}
> > +};
> > +
> > +#define YUV_FORMATS                                                 \
> > +    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,   \
> > +    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,   \
> > +    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,                       \
> > +    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,                       \
> > +    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,                       \
> > +    AV_PIX_FMT_YUV420P9,                                            \
> > +    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,                     \
> > +    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,                     \
> > +    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,                     \
> > +    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,                     \
> > +    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,                     \
> > +    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,                     \
> > +    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,                     \
> > +    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,                      \
> > +    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,                    \
> > +    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,                    \
> > +    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,                      \
> > +    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,    \
> > +    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
> > +
> > +#define RGB_FORMATS                                                 \
> > +    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,                          \
> > +    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,                          \
> > +    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,      \
> > +    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,     \
> > +    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,      \
> > +    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,    \
> > +    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,    \
> > +    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
> > +
> > +static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS,
> > AV_PIX_FMT_NONE };
> > +static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS,
> > AV_PIX_FMT_NONE };
> > +static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS,
> > YUV_FORMATS, AV_PIX_FMT_NONE };
> > +
> > +static int query_formats(AVFilterContext *ctx)
> > +{
> > +    const ColorStatsContext *s = ctx->priv;
> > +    const enum AVPixelFormat *pix_fmts = s->force_fmt == 1 ? rgb_pix_fmts :
> > +                                         s->force_fmt == 2 ? yuv_pix_fmts :
> > +                                         all_pix_fmts;
> > +
> > +    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
> > +    if (!fmts_list)
> > +        return AVERROR(ENOMEM);
> > +    return ff_set_common_formats(ctx, fmts_list);
> > +}
> > +
> > +#define DECLARE_STATS_PLANAR_FUNC(nbits, div)
> >                       \
> > +static int stats_slice_planar_##nbits(AVFilterContext *ctx, void *arg, int
> > jobnr, int nb_jobs)    \
> > +{
> >                       \
> > +    const ColorStatsContext *s = ctx->priv;
> >                       \
> > +    ThreadData *td = arg;
> >                       \
> > +    const AVFrame *in = td->in;
> >                       \
> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
> >                       \
> > +    int32_t count[4] = { 0 };
> >                       \
> > +    double min_value[4] = { s->max };
> >                       \
> > +    double max_value[4] = { 0 };
> >                       \
> > +
> >                       \
> > +    for (int i = 0; i < s->nb_components; i++) {
> >                       \
> > +        const int width = s->width[i];
> >                       \
> > +        const int height = s->height[i];
> >                       \
> > +        const int slice_start = (height *  jobnr     ) / nb_jobs;
> >                       \
> > +        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
> >                       \
> > +        int linesize = in->linesize[i] / div;
> >                       \
> > +        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] + slice_start
> > * linesize;            \
> > +
> >                       \
> > +        if (!(s->planes & (1 << i)))
> >                       \
> > +            continue;
> >                       \
> > +        for (int j = slice_start; j < slice_end; j++) {
> >                       \
> > +            for (int x = 0; x < width; x++) {
> >                       \
> > +                sum[i] += src[x];
> >                       \
> > +                sum2[i] += src[x] * src[x];
> >                       \
> > +                if (src[i] > max_value[i]) max_value[i] = src[i];
> >                       \
> > +                if (src[i] < min_value[i]) min_value[i] = src[i];
> >                       \
> > +            }
> >                       \
> > +            count[i] += width;
> >                       \
> > +            src += linesize;
> >                       \
> > +        }
> >                       \
> > +
> >                       \
> > +        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) / count[i];
> >                       \
> > +        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i] /
> > count[i]) / count[i]);     \
> > +        s->min_value[i][jobnr] = min_value[i];
> >                       \
> > +        s->max_value[i][jobnr] = max_value[i];
> >                       \
> > +    }
> >                       \
> > +
> >                       \
> > +    return 0;
> >                       \
> > +}
> > +DECLARE_STATS_PLANAR_FUNC(8, 1)
> > +DECLARE_STATS_PLANAR_FUNC(16, 2)
> > +
> > +#define DECLARE_STATS_PACKED_FUNC(nbits, div)
> >                       \
> > +static int stats_slice_packed_##nbits(AVFilterContext *ctx, void *arg, int
> > jobnr, int nb_jobs)    \
> > +{
> >                       \
> > +    const ColorStatsContext *s = ctx->priv;
> >                       \
> > +    ThreadData *td = arg;
> >                       \
> > +    const AVFrame *in = td->in;
> >                       \
> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
> >                       \
> > +    double min_value[4] = { s->max };
> >                       \
> > +    double max_value[4] = { 0 };
> >                       \
> > +    int32_t count[4] = { 0 };
> >                       \
> > +    const int width = in->width;
> >                       \
> > +    const int height = in->height;
> >                       \
> > +    const int slice_start = (height *  jobnr     ) / nb_jobs;
> >                       \
> > +    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
> >                       \
> > +    int linesize = in->linesize[0] / div;
> >                       \
> > +    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] + slice_start *
> > linesize;                \
> > +    const uint8_t ro = s->rgba_map[R];
> >                       \
> > +    const uint8_t go = s->rgba_map[G];
> >                       \
> > +    const uint8_t bo = s->rgba_map[B];
> >                       \
> > +    const uint8_t ao = s->rgba_map[A];
> >                       \
> > +
> >                       \
> > +    for (int y = slice_start; y < slice_end; y++) {
> >                       \
> > +        for (int x = 0; x < width * s->step; x += s->step) {
> >                       \
> > +            const int r = src[x + ro];
> >                       \
> > +            const int g = src[x + go];
> >                       \
> > +            const int b = src[x + bo];
> >                       \
> > +            const int a = src[x + ao];
> >                       \
> > +
> >                       \
> > +            sum[ro] += r;
> >                       \
> > +            sum[go] += g;
> >                       \
> > +            sum[bo] += b;
> >                       \
> > +            sum2[ro] += r * r;
> >                       \
> > +            sum2[go] += g * g;
> >                       \
> > +            sum2[bo] += b * b;
> >                       \
> > +
> >                       \
> > +            if (r > max_value[ro]) max_value[ro] = r;
> >                       \
> > +            if (r < min_value[ro]) min_value[ro] = r;
> >                       \
> > +            if (g > max_value[go]) max_value[go] = g;
> >                       \
> > +            if (g < min_value[go]) min_value[go] = g;
> >                       \
> > +            if (b > max_value[bo]) max_value[bo] = b;
> >                       \
> > +            if (b < min_value[bo]) min_value[bo] = b;
> >                       \
> > +            if (s->step == 4) {
> >                       \
> > +                sum2[ao] += a * a;
> >                       \
> > +                sum[ao] += a;
> >                       \
> > +                if (a > max_value[ao]) max_value[ao] = a;
> >                       \
> > +                if (a < min_value[ao]) min_value[ao] = a;
> >                       \
> > +            }
> >                       \
> > +        }
> >                       \
> > +        count[ro] += width;
> >                       \
> > +        count[go] += width;
> >                       \
> > +        count[bo] += width;
> >                       \
> > +        if (s->step == 4)
> >                       \
> > +            count[ao] += width;
> >                       \
> > +        src += linesize;
> >                       \
> > +    }
> >                       \
> > +
> >                       \
> > +    for (int p = 0; p < s->nb_components; p++) {
> >                       \
> > +        int ci = s->is_rgb ? s->rgba_map[p] : p;
> >                       \
> > +        double variance;
> >                       \
> > +
> >                       \
> > +        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) / count[ci];
> >                       \
> > +        variance = (sum2[ci] - sum[ci] * (double)sum[ci] / count[ci]) /
> > count[ci];                \
> > +        s->stdev[ci][jobnr] = sqrt(variance);
> >                       \
> > +        s->min_value[ci][jobnr] = min_value[ci];
> >                       \
> > +        s->max_value[ci][jobnr] = max_value[ci];
> >                       \
> > +    }
> >                       \
> > +
> >                       \
> > +    return 0;
> >                       \
> > +}
> > +DECLARE_STATS_PACKED_FUNC(8, 1)
> > +DECLARE_STATS_PACKED_FUNC(16, 2)
> > +
> > +static av_cold void uninit(AVFilterContext *ctx)
> > +{
> > +    ColorStatsContext *s = ctx->priv;
> > +
> > +    for (int i = 0; i < s->nb_components; i++) {
> > +        av_freep(&s->mean[i]);
> > +        av_freep(&s->stdev[i]);
> > +        av_freep(&s->min_value[i]);
> > +        av_freep(&s->max_value[i]);
> > +    }
> > +}
> > +
> > +static int config_input(AVFilterLink *inlink)
> > +{
> > +    AVFilterContext *ctx = inlink->dst;
> > +    ColorStatsContext *s = ctx->priv;
> > +    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
> > +
> > +    s->nb_components = desc->nb_components;
> > +    s->bitdepth = desc->comp[0].depth;
> > +    s->is_16bit = s->bitdepth > 8;
> > +    s->step = av_get_padded_bits_per_pixel(desc) >> (3 + s->is_16bit);
> > +    s->max  = 1 << s->bitdepth - 1;
> > +
> > +    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
> > +    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
> > +    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
> > +    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
> > +    s->comps[3] = 'a';
> > +
> > +    s->thread_count = FFMAX(1, FFMIN(inlink->h,
> > ff_filter_get_nb_threads(ctx)));
> > +    for (int i = 0; i < s->nb_components; i++) {
> > +        ptrdiff_t line_size = av_image_get_linesize(inlink->format,
> > inlink->w, i);
> > +
> > +        s->width[i] = line_size >> (s->bitdepth > 8);
> > +        s->height[i] = inlink->h >> ((i == 1 || i == 2) ?
> > desc->log2_chroma_h : 0);
> > +
> > +        s->mean[i] = av_mallocz_array(s->thread_count,
> > sizeof(*s->mean[i]));
> > +        s->stdev[i] = av_mallocz_array(s->thread_count,
> > sizeof(*s->stdev[i]));
> > +        s->max_value[i] = av_mallocz_array(s->thread_count,
> > sizeof(*s->max_value[i]));
> > +        s->min_value[i] = av_mallocz_array(s->thread_count,
> > sizeof(*s->min_value[i]));
> > +        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] ||
> > !s->min_value[i])
> > +            return AVERROR(ENOMEM);
> > +        for (int j = 0; j < s->thread_count; j++) {
> > +            s->min_value[i][j] = (1 << s->bitdepth);
> > +            s->max_value[i][j] = 0;
> > +        }
> > +    }
> > +
> > +    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 :
> > stats_slice_planar_16;
> > +    else
> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 :
> > stats_slice_packed_16;
> > +
> > +    return 0;
> > +}
> > +
> > +static void set_meta_float(AVDictionary **metadata, const char *key, char
> > c, float d)
> > +{
> > +    char value[128];
> > +    char key2[128];
> > +
> > +    snprintf(value, sizeof(value), "%.2f", d);
> > +    if (c)
> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
> > +    else
> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
> > +    av_dict_set(metadata, key2, value, 0);
> > +}
> > +
> > +static void set_meta_int(AVDictionary **metadata, const char *key, char c,
> > int d)
> > +{
> > +    char value[128];
> > +    char key2[128];
> > +
> > +    snprintf(value, sizeof(value), "%d", d);
> > +    if (c)
> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
> > +    else
> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
> > +    av_dict_set(metadata, key2, value, 0);
> > +}
> > +
> > +static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
> > +{
> > +    const ColorStatsContext *s = ctx->priv;
> > +    double mean[4] = { 0 };
> > +    double stdev[4] = { 0 };
> > +    double min_value[4] = { s->max };
> > +    double max_value[4] = { 0 };
> > +    int cidx;
> > +
> > +    for (int p = 0; p < s->nb_components; p++) {
> > +        cidx = s->is_rgb ? s->rgba_map[p] : p;
> > +
> > +        if (!(s->planes & (1 << p)))
> > +            continue;
> > +
> > +        for (int j = 0; j < s->thread_count; j++) {
> > +            mean[cidx] += s->mean[cidx][j];
> > +            stdev[cidx] += s->stdev[cidx][j];
> > +            if (s->min_value[cidx][j] < min_value[cidx])
> > +                min_value[cidx] = s->min_value[cidx][j];
> > +            if (s->max_value[cidx][j] > max_value[cidx])
> > +                max_value[cidx] = s->max_value[cidx][j];
> > +        }
> > +        mean[cidx] = mean[cidx] / s->thread_count;
> > +        stdev[cidx] = stdev[cidx] / s->thread_count;
> > +
> > +        set_meta_int(&in->metadata, "min", s->comps[p], min_value[cidx]);
> > +        set_meta_int(&in->metadata, "max", s->comps[p], max_value[cidx]);
> > +        set_meta_int(&in->metadata, "mean", s->comps[p], mean[cidx]);
> > +        set_meta_int(&in->metadata, "stdev", s->comps[p], stdev[cidx]);
> > +
> > +        set_meta_float(&in->metadata, "pmin", s->comps[p], min_value[cidx]
> > / s->max);
> > +        set_meta_float(&in->metadata, "pmax", s->comps[p], max_value[cidx]
> > / s->max);
> > +        set_meta_float(&in->metadata, "pmean", s->comps[p], mean[cidx] /
> > s->max);
> > +        set_meta_float(&in->metadata, "pstdev", s->comps[p], stdev[cidx] /
> > s->max);
> > +    }
> > +}
> > +
> > +static int activate(AVFilterContext *ctx)
> > +{
> > +    int ret;
> > +    AVFilterLink *inlink = ctx->inputs[0];
> > +    AVFilterLink *outlink = ctx->outputs[0];
> > +    ColorStatsContext *s = ctx->priv;
> > +    AVFrame *in;
> > +    ThreadData td;
> > +
> > +    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
> > +
> > +    ret = ff_inlink_consume_frame(inlink, &in);
> > +    if (ret < 0)
> > +        return ret;
> > +
> > +    if (in) {
> > +        td.in = in;
> > +        ctx->internal->execute(ctx, s->stats_slice, &td, NULL,
> > s->thread_count);
> > +
> > +        report_detect_result(ctx, in);
> > +        return ff_filter_frame(outlink, in);
> > +    }
> > +
> > +    FF_FILTER_FORWARD_STATUS(inlink, outlink);
> > +    FF_FILTER_FORWARD_WANTED(outlink, inlink);
> > +
> > +    return FFERROR_NOT_READY;
> > +}
> > +
> > +static const AVFilterPad inputs[] = {
> > +    {
> > +        .name         = "default",
> > +        .type         = AVMEDIA_TYPE_VIDEO,
> > +        .config_props = config_input,
> > +    },
> > +    { NULL }
> > +};
> > +
> > +static const AVFilterPad outputs[] = {
> > +    {
> > +        .name          = "default",
> > +        .type          = AVMEDIA_TYPE_VIDEO,
> > +    },
> > +    { NULL }
> > +};
> > +
> > +#define DEFINE_COLOR_FILTER(name_, description_)                        \
> > +    AVFilter ff_vf_##name_ = {                                          \
> > +        .name          = #name_,                                        \
> > +        .description   = NULL_IF_CONFIG_SMALL(description_),            \
> > +        .priv_size     = sizeof(ColorStatsContext),                     \
> > +        .priv_class    = &name_ ## _class,                              \
> > +        .init          = name_##_init,                                  \
> > +        .uninit        = uninit,                                        \
> > +        .query_formats = query_formats,                                 \
> > +        .inputs        = inputs,                                        \
> > +        .outputs       = outputs,                                       \
> > +        .activate      = activate,                                      \
> > +        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |       \
> > +                         AVFILTER_FLAG_SLICE_THREADS,                   \
> > +    }
> > +
> > +#if CONFIG_COLORSTATS_FILTER
> > +
> > +#define colorstats_options options
> > +AVFILTER_DEFINE_CLASS(colorstats);
> > +
> > +static int colorstats_init(AVFilterContext *ctx)
> > +{
> > +    return 0;
> > +}
> > +
> > +DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
> > +#endif
> > +
> > +#if CONFIG_COLORRGBSTATS_FILTER
> > +
> > +#define colorrgbstats_options options
> > +AVFILTER_DEFINE_CLASS(colorrgbstats);
> > +
> > +static int colorrgbstats_init(AVFilterContext *ctx)
> > +{
> > +    ColorStatsContext *s = ctx->priv;
> > +
> > +    s->force_fmt = 1;
> > +    return 0;
> > +}
> > +
> > +DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
> > +#endif
> > +
> > +#if CONFIG_COLORYUVSTATS_FILTER
> > +
> > +#define coloryuvstats_options options
> > +AVFILTER_DEFINE_CLASS(coloryuvstats);
> > +
> > +static int coloryuvstats_init(AVFilterContext *ctx)
> > +{
> > +    ColorStatsContext *s = ctx->priv;
> > +
> > +    s->force_fmt = 2;
> > +    return 0;
> > +}
> > +
> > +DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
> > +#endif
> > --
> > 2.21.0
> >
> > _______________________________________________
> > ffmpeg-devel mailing list
> > ffmpeg-devel@ffmpeg.org
> > https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
> >
> > To unsubscribe, visit link above, or email
> > ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Paul B Mahol Dec. 27, 2019, 2:20 p.m. UTC | #3
On 12/27/19, Limin Wang <lance.lmwang@gmail.com> wrote:
> On Fri, Dec 27, 2019 at 12:35:25PM +0100, Paul B Mahol wrote:
>> You are duplicating some functionality of signalstats filter.
>>
> Yes, I have another function that needs the mean and stdev, which are
> supported in the showinfo filter (but only for 8bit, with no packed-format
> support and no multi-threading), and signalstats doesn't support RGB formats
> and doesn't have stdev; it also has too many other functions and is difficult
> to change, so I think it's simpler to create a new filter for this.
>

No, unacceptable. use signalstats filter.

>
>> On 12/27/19, lance.lmwang@gmail.com <lance.lmwang@gmail.com> wrote:
>> > From: Limin Wang <lance.lmwang@gmail.com>
>> >
>> > Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
>> > ---
>> >  doc/filters.texi            |  74 ++++++
>> >  libavfilter/Makefile        |   1 +
>> >  libavfilter/allfilters.c    |   3 +
>> >  libavfilter/vf_colorstats.c | 461 ++++++++++++++++++++++++++++++++++++
>> >  4 files changed, 539 insertions(+)
>> >  create mode 100644 libavfilter/vf_colorstats.c
>> >
>> > diff --git a/doc/filters.texi b/doc/filters.texi
>> > index 8c5d3a5760..81968b2c17 100644
>> > --- a/doc/filters.texi
>> > +++ b/doc/filters.texi
>> > @@ -7695,6 +7695,80 @@ For example to convert the input to SMPTE-240M,
>> > use
>> > the command:
>> >  colorspace=smpte240m
>> >  @end example
>> >
>> > +@section colorstats, colorrgbstats, coloryuvstats
>> > +The filter provides statistical video measurements such as mean,
>> > minimum,
>> > maximum and
>> > +standard deviation for each frame. The user can check for
>> > unexpected/accidental errors
>> > +very quickly with them.
>> > +
>> > +@var{colorrgbstats} reports the color stats for an RGB input video,
>> > @var{coloryuvstats}
>> > +for a YUV input video.
>> > +
>> > +These filters accept the following parameters:
>> > +@table @option
>> > +@item planes
>> > +Set which planes to filter. Default is only the first plane.
>> > +@end table
>> > +
>> > +By default the filter will report these metadata values if the planes
>> > +are processed:
>> > +
>> > +@table @option
>> > +@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
>> > +Display the minimal Y/U/V/R/G/B/A plane value contained within the
>> > input
>> > frame.
>> > +Expressed in range of [0, (1<<bitdepth)-1]
>> > +
>> > +@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, pmin.a
>> > +Display the minimal Y/U/V/R/G/B/A plane percentage of maximum contained
>> > within
>> > +the input frame. Expressed in range of [0, 1]
>> > +
>> > +@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
>> > +Display the maximum Y/U/V/R/G/B/A plane value contained within the
>> > input
>> > frame.
>> > +Expressed in range of [0, (1<<bitdepth)-1]
>> > +
>> > +@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
>> > +Display the maximum Y/U/V/R/G/B/A plane percentage of maximum contained
>> > within
>> > +the input frame. Expressed in range of [0, 1]
>> > +
>> > +@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
>> > +Display the Y/U/V/R/G/B/A plane mean value contained within the input
>> > frame.
>> > +Expressed in range of [0, (1<<bitdepth)-1]
>> > +
>> > +@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
>> > +Display the Y/U/V/R/G/B/A plane mean value percentage of maximum
>> > contained
>> > within
>> > +the input frame. Expressed in range of [0, 1]
>> > +
>> > +@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
>> > +Display the Y/U/V/R/G/B/A plane standard deviation value contained
>> > within
>> > the
>> > +input frame. Expressed in range of [0, (1<<bitdepth)-1]
>> > +
>> > +@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b,
>> > pstdev.a
>> > +Display the Y/U/V/R/G/B/A plane standard deviation value percentage of
>> > maximum contained
>> > +within the input frame. Expressed in range of [0, 1]
>> > +@end table
>> > +
>> > +@subsection Examples
>> > +
>> > +@itemize
>> > +@item
>> > +Show all YUV color stats for each frame:
>> > +@example
>> > +ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf
>> > -show_frames
>> > +@end example
>> > +
>> > +@item
>> > +Draw graph for the pmean and pstdev value of the Y plane per frame:
>> > +@example
>> > +ffplay -i example.mov -vf
>> > coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
>> > +@end example
>> > +
>> > +@item
>> > +Print all RGB color stats for each frame:
>> > +@example
>> > +ffplay -i example.mov -vf colorrgbstats=planes=0xf,metadata=mode=print
>> > +@end example
>> > +
>> > +@end itemize
>> > +
>> >  @section convolution
>> >
>> >  Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49
>> > elements.
>> > diff --git a/libavfilter/Makefile b/libavfilter/Makefile
>> > index 37d4eee858..a007bd32d1 100644
>> > --- a/libavfilter/Makefile
>> > +++ b/libavfilter/Makefile
>> > @@ -182,6 +182,7 @@ OBJS-$(CONFIG_CIESCOPE_FILTER)               +=
>> > vf_ciescope.o
>> >  OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
>> >  OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
>> >  OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      += vf_colorchannelmixer.o
>> > +OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
>> >  OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
>> >  OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o
>> > opencl.o \
>> >                                                  opencl/colorkey.o
>> > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
>> > index c295f8e403..6b84a45452 100644
>> > --- a/libavfilter/allfilters.c
>> > +++ b/libavfilter/allfilters.c
>> > @@ -172,6 +172,9 @@ extern AVFilter ff_vf_ciescope;
>> >  extern AVFilter ff_vf_codecview;
>> >  extern AVFilter ff_vf_colorbalance;
>> >  extern AVFilter ff_vf_colorchannelmixer;
>> > +extern AVFilter ff_vf_colorstats;
>> > +extern AVFilter ff_vf_colorrgbstats;
>> > +extern AVFilter ff_vf_coloryuvstats;
>> >  extern AVFilter ff_vf_colorkey;
>> >  extern AVFilter ff_vf_colorkey_opencl;
>> >  extern AVFilter ff_vf_colorhold;
>> > diff --git a/libavfilter/vf_colorstats.c b/libavfilter/vf_colorstats.c
>> > new file mode 100644
>> > index 0000000000..7e94c572f9
>> > --- /dev/null
>> > +++ b/libavfilter/vf_colorstats.c
>> > @@ -0,0 +1,461 @@
>> > +/*
>> > + * This file is part of FFmpeg.
>> > + *
>> > + * FFmpeg is free software; you can redistribute it and/or
>> > + * modify it under the terms of the GNU Lesser General Public
>> > + * License as published by the Free Software Foundation; either
>> > + * version 2.1 of the License, or (at your option) any later version.
>> > + *
>> > + * FFmpeg is distributed in the hope that it will be useful,
>> > + * but WITHOUT ANY WARRANTY; without even the implied warranty of
>> > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
>> > + * Lesser General Public License for more details.
>> > + *
>> > + * You should have received a copy of the GNU Lesser General Public
>> > + * License along with FFmpeg; if not, write to the Free Software
>> > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
>> > 02110-1301
>> > USA
>> > + */
>> > +
>> > +#include "libavutil/imgutils.h"
>> > +#include "libavutil/opt.h"
>> > +#include "libavutil/pixdesc.h"
>> > +
>> > +#include "avfilter.h"
>> > +#include "drawutils.h"
>> > +#include "filters.h"
>> > +
>> > +#define R 0
>> > +#define G 1
>> > +#define B 2
>> > +#define A 3
>> > +
>> > +typedef struct ThreadData {
>> > +    AVFrame *in;
>> > +} ThreadData;
>> > +
>> > +typedef struct ColorStatsContext {
>> > +    const AVClass *class;
>> > +
>> > +    ptrdiff_t width[4];
>> > +    ptrdiff_t height[4];
>> > +
>> > +    int planes;
>> > +    int step;
>> > +    int bitdepth;
>> > +    int nb_components;
>> > +    int thread_count;
>> > +    int is_16bit;
>> > +    int is_rgb;
>> > +    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
>> > +    uint8_t rgba_map[4];
>> > +    char comps[4];
>> > +
>> > +    double *mean[4];
>> > +    double *stdev[4];
>> > +
>> > +    int max;
>> > +    double *min_value[4];
>> > +    double *max_value[4];
>> > +
>> > +    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr, int
>> > nb_jobs);
>> > +} ColorStatsContext;
>> > +
>> > +#define OFFSET(x) offsetof(ColorStatsContext, x)
>> > +#define V AV_OPT_FLAG_VIDEO_PARAM
>> > +#define F AV_OPT_FLAG_FILTERING_PARAM
>> > +
>> > +static const AVOption options[] = {
>> > +    { "planes", "set planes to filter", OFFSET(planes),
>> > AV_OPT_TYPE_INT,
>> > {.i64=1}, 1, 0xf, V|F},
>> > +    {NULL}
>> > +};
>> > +
>> > +#define YUV_FORMATS                                                 \
>> > +    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,   \
>> > +    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,   \
>> > +    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,                       \
>> > +    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,                       \
>> > +    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,                       \
>> > +    AV_PIX_FMT_YUV420P9,                                            \
>> > +    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,                     \
>> > +    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,                     \
>> > +    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,                     \
>> > +    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,                     \
>> > +    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,                     \
>> > +    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,                     \
>> > +    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,                     \
>> > +    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,                      \
>> > +    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,                    \
>> > +    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,                    \
>> > +    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,                      \
>> > +    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,    \
>> > +    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
>> > +
>> > +#define RGB_FORMATS                                                 \
>> > +    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,                          \
>> > +    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,                          \
>> > +    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,      \
>> > +    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,     \
>> > +    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,      \
>> > +    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,    \
>> > +    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,    \
>> > +    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
>> > +
>> > +static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS,
>> > AV_PIX_FMT_NONE };
>> > +static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS,
>> > AV_PIX_FMT_NONE };
>> > +static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS,
>> > YUV_FORMATS, AV_PIX_FMT_NONE };
>> > +
>> > +static int query_formats(AVFilterContext *ctx)
>> > +{
>> > +    const ColorStatsContext *s = ctx->priv;
>> > +    const enum AVPixelFormat *pix_fmts = s->force_fmt == 1 ?
>> > rgb_pix_fmts :
>> > +                                         s->force_fmt == 2 ?
>> > yuv_pix_fmts :
>> > +                                         all_pix_fmts;
>> > +
>> > +    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
>> > +    if (!fmts_list)
>> > +        return AVERROR(ENOMEM);
>> > +    return ff_set_common_formats(ctx, fmts_list);
>> > +}
>> > +
>> > +#define DECLARE_STATS_PLANAR_FUNC(nbits, div)
>> >                       \
>> > +static int stats_slice_planar_##nbits(AVFilterContext *ctx, void *arg,
>> > int
>> > jobnr, int nb_jobs)    \
>> > +{
>> >                       \
>> > +    const ColorStatsContext *s = ctx->priv;
>> >                       \
>> > +    ThreadData *td = arg;
>> >                       \
>> > +    const AVFrame *in = td->in;
>> >                       \
>> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>> >                       \
>> > +    int32_t count[4] = { 0 };
>> >                       \
>> > +    double min_value[4] = { s->max };
>> >                       \
>> > +    double max_value[4] = { 0 };
>> >                       \
>> > +
>> >                       \
>> > +    for (int i = 0; i < s->nb_components; i++) {
>> >                       \
>> > +        const int width = s->width[i];
>> >                       \
>> > +        const int height = s->height[i];
>> >                       \
>> > +        const int slice_start = (height *  jobnr     ) / nb_jobs;
>> >                       \
>> > +        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>> >                       \
>> > +        int linesize = in->linesize[i] / div;
>> >                       \
>> > +        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] +
>> > slice_start
>> > * linesize;            \
>> > +
>> >                       \
>> > +        if (!(s->planes & (1 << i)))
>> >                       \
>> > +            continue;
>> >                       \
>> > +        for (int j = slice_start; j < slice_end; j++) {
>> >                       \
>> > +            for (int x = 0; x < width; x++) {
>> >                       \
>> > +                sum[i] += src[x];
>> >                       \
>> > +                sum2[i] += src[x] * src[x];
>> >                       \
>> > +                if (src[i] > max_value[i]) max_value[i] = src[i];
>> >                       \
>> > +                if (src[i] < min_value[i]) min_value[i] = src[i];
>> >                       \
>> > +            }
>> >                       \
>> > +            count[i] += width;
>> >                       \
>> > +            src += linesize;
>> >                       \
>> > +        }
>> >                       \
>> > +
>> >                       \
>> > +        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) / count[i];
>> >                       \
>> > +        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i] /
>> > count[i]) / count[i]);     \
>> > +        s->min_value[i][jobnr] = min_value[i];
>> >                       \
>> > +        s->max_value[i][jobnr] = max_value[i];
>> >                       \
>> > +    }
>> >                       \
>> > +
>> >                       \
>> > +    return 0;
>> >                       \
>> > +}
>> > +DECLARE_STATS_PLANAR_FUNC(8, 1)
>> > +DECLARE_STATS_PLANAR_FUNC(16, 2)
>> > +
>> > +#define DECLARE_STATS_PACKED_FUNC(nbits, div)
>> >                       \
>> > +static int stats_slice_packed_##nbits(AVFilterContext *ctx, void *arg,
>> > int
>> > jobnr, int nb_jobs)    \
>> > +{
>> >                       \
>> > +    const ColorStatsContext *s = ctx->priv;
>> >                       \
>> > +    ThreadData *td = arg;
>> >                       \
>> > +    const AVFrame *in = td->in;
>> >                       \
>> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>> >                       \
>> > +    double min_value[4] = { s->max };
>> >                       \
>> > +    double max_value[4] = { 0 };
>> >                       \
>> > +    int32_t count[4] = { 0 };
>> >                       \
>> > +    const int width = in->width;
>> >                       \
>> > +    const int height = in->height;
>> >                       \
>> > +    const int slice_start = (height *  jobnr     ) / nb_jobs;
>> >                       \
>> > +    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>> >                       \
>> > +    int linesize = in->linesize[0] / div;
>> >                       \
>> > +    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] + slice_start
>> > *
>> > linesize;                \
>> > +    const uint8_t ro = s->rgba_map[R];
>> >                       \
>> > +    const uint8_t go = s->rgba_map[G];
>> >                       \
>> > +    const uint8_t bo = s->rgba_map[B];
>> >                       \
>> > +    const uint8_t ao = s->rgba_map[A];
>> >                       \
>> > +
>> >                       \
>> > +    for (int y = slice_start; y < slice_end; y++) {
>> >                       \
>> > +        for (int x = 0; x < width * s->step; x += s->step) {
>> >                       \
>> > +            const int r = src[x + ro];
>> >                       \
>> > +            const int g = src[x + go];
>> >                       \
>> > +            const int b = src[x + bo];
>> >                       \
>> > +            const int a = src[x + ao];
>> >                       \
>> > +
>> >                       \
>> > +            sum[ro] += r;
>> >                       \
>> > +            sum[go] += g;
>> >                       \
>> > +            sum[bo] += b;
>> >                       \
>> > +            sum2[ro] += r * r;
>> >                       \
>> > +            sum2[go] += g * g;
>> >                       \
>> > +            sum2[bo] += b * b;
>> >                       \
>> > +
>> >                       \
>> > +            if (r > max_value[ro]) max_value[ro] = r;
>> >                       \
>> > +            if (r < min_value[ro]) min_value[ro] = r;
>> >                       \
>> > +            if (g > max_value[go]) max_value[go] = g;
>> >                       \
>> > +            if (g < min_value[go]) min_value[go] = g;
>> >                       \
>> > +            if (b > max_value[bo]) max_value[bo] = b;
>> >                       \
>> > +            if (b < min_value[bo]) min_value[bo] = b;
>> >                       \
>> > +            if (s->step == 4) {
>> >                       \
>> > +                sum2[ao] += a * a;
>> >                       \
>> > +                sum[ao] += a;
>> >                       \
>> > +                if (a > max_value[ao]) max_value[ao] = a;
>> >                       \
>> > +                if (a < min_value[ao]) min_value[ao] = a;
>> >                       \
>> > +            }
>> >                       \
>> > +        }
>> >                       \
>> > +        count[ro] += width;
>> >                       \
>> > +        count[go] += width;
>> >                       \
>> > +        count[bo] += width;
>> >                       \
>> > +        if (s->step == 4)
>> >                       \
>> > +            count[ao] += width;
>> >                       \
>> > +        src += linesize;
>> >                       \
>> > +    }
>> >                       \
>> > +
>> >                       \
>> > +    for (int p = 0; p < s->nb_components; p++) {
>> >                       \
>> > +        int ci = s->is_rgb ? s->rgba_map[p] : p;
>> >                       \
>> > +        double variance;
>> >                       \
>> > +
>> >                       \
>> > +        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) /
>> > count[ci];
>> >                       \
>> > +        variance = (sum2[ci] - sum[ci] * (double)sum[ci] / count[ci]) /
>> > count[ci];                \
>> > +        s->stdev[ci][jobnr] = sqrt(variance);
>> >                       \
>> > +        s->min_value[ci][jobnr] = min_value[ci];
>> >                       \
>> > +        s->max_value[ci][jobnr] = max_value[ci];
>> >                       \
>> > +    }
>> >                       \
>> > +
>> >                       \
>> > +    return 0;
>> >                       \
>> > +}
>> > +DECLARE_STATS_PACKED_FUNC(8, 1)
>> > +DECLARE_STATS_PACKED_FUNC(16, 2)
>> > +
>> > +static av_cold void uninit(AVFilterContext *ctx)
>> > +{
>> > +    ColorStatsContext *s = ctx->priv;
>> > +
>> > +    for (int i = 0; i < s->nb_components; i++) {
>> > +        av_freep(&s->mean[i]);
>> > +        av_freep(&s->stdev[i]);
>> > +        av_freep(&s->min_value[i]);
>> > +        av_freep(&s->max_value[i]);
>> > +    }
>> > +}
>> > +
>> > +static int config_input(AVFilterLink *inlink)
>> > +{
>> > +    AVFilterContext *ctx = inlink->dst;
>> > +    ColorStatsContext *s = ctx->priv;
>> > +    const AVPixFmtDescriptor *desc =
>> > av_pix_fmt_desc_get(inlink->format);
>> > +
>> > +    s->nb_components = desc->nb_components;
>> > +    s->bitdepth = desc->comp[0].depth;
>> > +    s->is_16bit = s->bitdepth > 8;
>> > +    s->step = av_get_padded_bits_per_pixel(desc) >> (3 + s->is_16bit);
>> > +    s->max  = 1 << s->bitdepth - 1;
>> > +
>> > +    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
>> > +    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
>> > +    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
>> > +    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
>> > +    s->comps[3] = 'a';
>> > +
>> > +    s->thread_count = FFMAX(1, FFMIN(inlink->h,
>> > ff_filter_get_nb_threads(ctx)));
>> > +    for (int i = 0; i < s->nb_components; i++) {
>> > +        ptrdiff_t line_size = av_image_get_linesize(inlink->format,
>> > inlink->w, i);
>> > +
>> > +        s->width[i] = line_size >> (s->bitdepth > 8);
>> > +        s->height[i] = inlink->h >> ((i == 1 || i == 2) ?
>> > desc->log2_chroma_h : 0);
>> > +
>> > +        s->mean[i] = av_mallocz_array(s->thread_count,
>> > sizeof(*s->mean[i]));
>> > +        s->stdev[i] = av_mallocz_array(s->thread_count,
>> > sizeof(*s->stdev[i]));
>> > +        s->max_value[i] = av_mallocz_array(s->thread_count,
>> > sizeof(*s->max_value[i]));
>> > +        s->min_value[i] = av_mallocz_array(s->thread_count,
>> > sizeof(*s->min_value[i]));
>> > +        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] ||
>> > !s->min_value[i])
>> > +            return AVERROR(ENOMEM);
>> > +        for (int j = 0; j < s->thread_count; j++) {
>> > +            s->min_value[i][j] = (1 << s->bitdepth);
>> > +            s->max_value[i][j] = 0;
>> > +        }
>> > +    }
>> > +
>> > +    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
>> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 :
>> > stats_slice_planar_16;
>> > +    else
>> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 :
>> > stats_slice_packed_16;
>> > +
>> > +    return 0;
>> > +}
>> > +
>> > +static void set_meta_float(AVDictionary **metadata, const char *key,
>> > char
>> > c, float d)
>> > +{
>> > +    char value[128];
>> > +    char key2[128];
>> > +
>> > +    snprintf(value, sizeof(value), "%.2f", d);
>> > +    if (c)
>> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
>> > +    else
>> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
>> > +    av_dict_set(metadata, key2, value, 0);
>> > +}
>> > +
>> > +static void set_meta_int(AVDictionary **metadata, const char *key, char
>> > c,
>> > int d)
>> > +{
>> > +    char value[128];
>> > +    char key2[128];
>> > +
>> > +    snprintf(value, sizeof(value), "%d", d);
>> > +    if (c)
>> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
>> > +    else
>> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
>> > +    av_dict_set(metadata, key2, value, 0);
>> > +}
>> > +
>> > +static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
>> > +{
>> > +    const ColorStatsContext *s = ctx->priv;
>> > +    double mean[4] = { 0 };
>> > +    double stdev[4] = { 0 };
>> > +    double min_value[4] = { s->max };
>> > +    double max_value[4] = { 0 };
>> > +    int cidx;
>> > +
>> > +    for (int p = 0; p < s->nb_components; p++) {
>> > +        cidx = s->is_rgb ? s->rgba_map[p] : p;
>> > +
>> > +        if (!(s->planes & (1 << p)))
>> > +            continue;
>> > +
>> > +        for (int j = 0; j < s->thread_count; j++) {
>> > +            mean[cidx] += s->mean[cidx][j];
>> > +            stdev[cidx] += s->stdev[cidx][j];
>> > +            if (s->min_value[cidx][j] < min_value[cidx])
>> > +                min_value[cidx] = s->min_value[cidx][j];
>> > +            if (s->max_value[cidx][j] > max_value[cidx])
>> > +                max_value[cidx] = s->max_value[cidx][j];
>> > +        }
>> > +        mean[cidx] = mean[cidx] / s->thread_count;
>> > +        stdev[cidx] = stdev[cidx] / s->thread_count;
>> > +
>> > +        set_meta_int(&in->metadata, "min", s->comps[p],
>> > min_value[cidx]);
>> > +        set_meta_int(&in->metadata, "max", s->comps[p],
>> > max_value[cidx]);
>> > +        set_meta_int(&in->metadata, "mean", s->comps[p], mean[cidx]);
>> > +        set_meta_int(&in->metadata, "stdev", s->comps[p], stdev[cidx]);
>> > +
>> > +        set_meta_float(&in->metadata, "pmin", s->comps[p],
>> > min_value[cidx]
>> > / s->max);
>> > +        set_meta_float(&in->metadata, "pmax", s->comps[p],
>> > max_value[cidx]
>> > / s->max);
>> > +        set_meta_float(&in->metadata, "pmean", s->comps[p], mean[cidx]
>> > /
>> > s->max);
>> > +        set_meta_float(&in->metadata, "pstdev", s->comps[p],
>> > stdev[cidx] /
>> > s->max);
>> > +    }
>> > +}
>> > +
>> > +static int activate(AVFilterContext *ctx)
>> > +{
>> > +    int ret;
>> > +    AVFilterLink *inlink = ctx->inputs[0];
>> > +    AVFilterLink *outlink = ctx->outputs[0];
>> > +    ColorStatsContext *s = ctx->priv;
>> > +    AVFrame *in;
>> > +    ThreadData td;
>> > +
>> > +    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
>> > +
>> > +    ret = ff_inlink_consume_frame(inlink, &in);
>> > +    if (ret < 0)
>> > +        return ret;
>> > +
>> > +    if (in) {
>> > +        td.in = in;
>> > +        ctx->internal->execute(ctx, s->stats_slice, &td, NULL,
>> > s->thread_count);
>> > +
>> > +        report_detect_result(ctx, in);
>> > +        return ff_filter_frame(outlink, in);
>> > +    }
>> > +
>> > +    FF_FILTER_FORWARD_STATUS(inlink, outlink);
>> > +    FF_FILTER_FORWARD_WANTED(outlink, inlink);
>> > +
>> > +    return FFERROR_NOT_READY;
>> > +}
>> > +
>> > +static const AVFilterPad inputs[] = {
>> > +    {
>> > +        .name         = "default",
>> > +        .type         = AVMEDIA_TYPE_VIDEO,
>> > +        .config_props = config_input,
>> > +    },
>> > +    { NULL }
>> > +};
>> > +
>> > +static const AVFilterPad outputs[] = {
>> > +    {
>> > +        .name          = "default",
>> > +        .type          = AVMEDIA_TYPE_VIDEO,
>> > +    },
>> > +    { NULL }
>> > +};
>> > +
>> > +#define DEFINE_COLOR_FILTER(name_, description_)
>> > \
>> > +    AVFilter ff_vf_##name_ = {
>> > \
>> > +        .name          = #name_,
>> > \
>> > +        .description   = NULL_IF_CONFIG_SMALL(description_),
>> > \
>> > +        .priv_size     = sizeof(ColorStatsContext),
>> > \
>> > +        .priv_class    = &name_ ## _class,
>> > \
>> > +        .init          = name_##_init,
>> > \
>> > +        .uninit        = uninit,
>> > \
>> > +        .query_formats = query_formats,
>> > \
>> > +        .inputs        = inputs,
>> > \
>> > +        .outputs       = outputs,
>> > \
>> > +        .activate      = activate,
>> > \
>> > +        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |
>> > \
>> > +                         AVFILTER_FLAG_SLICE_THREADS,
>> > \
>> > +    }
>> > +
>> > +#if CONFIG_COLORSTATS_FILTER
>> > +
>> > +#define colorstats_options options
>> > +AVFILTER_DEFINE_CLASS(colorstats);
>> > +
>> > +static int colorstats_init(AVFilterContext *ctx)
>> > +{
>> > +    return 0;
>> > +}
>> > +
>> > +DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
>> > +#endif
>> > +
>> > +#if CONFIG_COLORRGBSTATS_FILTER
>> > +
>> > +#define colorrgbstats_options options
>> > +AVFILTER_DEFINE_CLASS(colorrgbstats);
>> > +
>> > +static int colorrgbstats_init(AVFilterContext *ctx)
>> > +{
>> > +    ColorStatsContext *s = ctx->priv;
>> > +
>> > +    s->force_fmt = 1;
>> > +    return 0;
>> > +}
>> > +
>> > +DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
>> > +#endif
>> > +
>> > +#if CONFIG_COLORYUVSTATS_FILTER
>> > +
>> > +#define coloryuvstats_options options
>> > +AVFILTER_DEFINE_CLASS(coloryuvstats);
>> > +
>> > +static int coloryuvstats_init(AVFilterContext *ctx)
>> > +{
>> > +    ColorStatsContext *s = ctx->priv;
>> > +
>> > +    s->force_fmt = 2;
>> > +    return 0;
>> > +}
>> > +
>> > +DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
>> > +#endif
>> > --
>> > 2.21.0
>> >
>> > _______________________________________________
>> > ffmpeg-devel mailing list
>> > ffmpeg-devel@ffmpeg.org
>> > https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>> >
>> > To unsubscribe, visit link above, or email
>> > ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>
> --
> Thanks,
> Limin Wang
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Limin Wang Dec. 27, 2019, 3:47 p.m. UTC | #4
On Fri, Dec 27, 2019 at 03:20:19PM +0100, Paul B Mahol wrote:
> On 12/27/19, Limin Wang <lance.lmwang@gmail.com> wrote:
> > On Fri, Dec 27, 2019 at 12:35:25PM +0100, Paul B Mahol wrote:
> >> You are duplicating some functionality of signalstats filter.
> >>
> > Yes, I have other function need to use the mean and stdev which is
> > support in showinfo filter(only 8bit and don't support packed format,
> > no multi-thread), and signalstats don't support rgb format and don't
> > have stdev, also it have too many other function and difficult to change
> > it, so I think it's more simple to create a new filter to do it.
> >
> 
> No, unacceptable. use signalstats filter.

Performance is another major reason; below are the profiling results
comparing the two filters:

./ffmpeg -nostats -f lavfi -i testsrc2=4k:d=2 -vf bench=start,signalstats,bench=stop -f null -
[bench @ 0x3fb9080] t:0.161589 avg:0.165756 max:0.169923 min:0.161589
[bench @ 0x3fb9080] t:0.160334 avg:0.163948 max:0.169923 min:0.160334
[bench @ 0x3fb9080] t:0.160345 avg:0.163047 max:0.169923 min:0.160334
[bench @ 0x3fb9080] t:0.160924 avg:0.162623 max:0.169923 min:0.160334
[bench @ 0x3fb9080] t:0.160318 avg:0.162238 max:0.169923 min:0.160318

./ffmpeg -nostats -f lavfi -i testsrc2=4k:d=2 -vf bench=start,colorstats,bench=stop -f null -
[bench @ 0x26f6100] t:0.012596 avg:0.012612 max:0.012628 min:0.012596
[bench @ 0x26f6100] t:0.012542 avg:0.012588 max:0.012628 min:0.012542
[bench @ 0x26f6100] t:0.012529 avg:0.012573 max:0.012628 min:0.012529
[bench @ 0x26f6100] t:0.012532 avg:0.012565 max:0.012628 min:0.012529
[bench @ 0x26f6100] t:0.012527 avg:0.012559 max:0.012628 min:0.012527
[bench @ 0x26f6100] t:0.012525 avg:0.012554 max:0.012628 min:0.012525
[bench @ 0x26f6100] t:0.012522 avg:0.012550 max:0.012628 min:0.012522
[bench @ 0x26f6100] t:0.012552 avg:0.012550 max:0.012628 min:0.012522


> 
> >
> >> On 12/27/19, lance.lmwang@gmail.com <lance.lmwang@gmail.com> wrote:
> >> > From: Limin Wang <lance.lmwang@gmail.com>
> >> >
> >> > Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
> >> > ---
> >> >  doc/filters.texi            |  74 ++++++
> >> >  libavfilter/Makefile        |   1 +
> >> >  libavfilter/allfilters.c    |   3 +
> >> >  libavfilter/vf_colorstats.c | 461 ++++++++++++++++++++++++++++++++++++
> >> >  4 files changed, 539 insertions(+)
> >> >  create mode 100644 libavfilter/vf_colorstats.c
> >> >
> >> > diff --git a/doc/filters.texi b/doc/filters.texi
> >> > index 8c5d3a5760..81968b2c17 100644
> >> > --- a/doc/filters.texi
> >> > +++ b/doc/filters.texi
> >> > @@ -7695,6 +7695,80 @@ For example to convert the input to SMPTE-240M,
> >> > use
> >> > the command:
> >> >  colorspace=smpte240m
> >> >  @end example
> >> >
> >> > +@section colorstats, colorrgbstats, coloryuvstats
> >> > +The filter provides statistical video measurements such as mean,
> >> > minimum,
> >> > maximum and
> >> > +standard deviation for each frame. The user can check for
> >> > unexpected/accidental errors
> >> > +very quickly with them.
> >> > +
> >> > +@var{colorrgbstats} report the color stats for RGB input video,
> >> > @var{coloryuvstats}
> >> > +to an YUV input video.
> >> > +
> >> > +These filters accept the following parameters:
> >> > +@table @option
> >> > +@item planes
> >> > +Set which planes to filter. Default is only the first plane.
> >> > +@end table
> >> > +
> >> > +By default the filter will report these metadata values if the planes
> >> > +are processed:
> >> > +
> >> > +@table @option
> >> > +@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
> >> > +Display the minimal Y/U/V/R/G/B/A plane value contained within the
> >> > input
> >> > frame.
> >> > +Expressed in range of [0, 1<<bitdepth-1]
> >> > +
> >> > +@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, min.a
> >> > +Display the minimal Y/U/V/R/G/B/A plane percentage of maximum contained
> >> > within
> >> > +the input frame. Expressed in range of [0, 1]
> >> > +
> >> > +@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
> >> > +Display the maximum Y/U/V/R/G/B/A plane value contained within the
> >> > input
> >> > frame.
> >> > +Expressed in range of [0, 1<<bitdepth-1]
> >> > +
> >> > +@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
> >> > +Display the maximum Y/U/V/R/G/B/A plane percentage of maximum contained
> >> > within
> >> > +the input frame. Expressed in range of [0, 1]
> >> > +
> >> > +@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
> >> > +Display the Y/U/V/R/G/B/A plane mean value contained within the input
> >> > frame.
> >> > +Expressed in range of [0, 1<<bitdepth-1]
> >> > +
> >> > +@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
> >> > +Display the Y/U/V/R/G/B/A plane mean value percentage of maximum
> >> > contained
> >> > within
> >> > +the input frame. Expressed in range of [0, 1]
> >> > +
> >> > +@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
> >> > +Display the Y/U/V/R/G/B/A plane standard deviation value contained
> >> > within
> >> > the
> >> > +input frame. Expressed in range of [0, 1<<bitdepth-1]
> >> > +
> >> > +@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b,
> >> > pstdev.a
> >> > +Display the Y/U/V/R/G/B/A plane standard deviation value percentage of
> >> > maximum contained
> >> > +within the input frame. Expressed in range of [0, 1]
> >> > +@end table
> >> > +
> >> > +@subsection Examples
> >> > +
> >> > +@itemize
> >> > +@item
> >> > +Show all YUV color stats for each frame:
> >> > +@example
> >> > +ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf
> >> > -show_frames
> >> > +@end example
> >> > +
> >> > +@item
> >> > +Draw graph for the pmean and pstdev value of the Y plane per frame:
> >> > +@example
> >> > +ffplay -i example.mov -vf
> >> > coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
> >> > +@end example
> >> > +
> >> > +@item
> >> > +Print all RGB color stats for each frame:
> >> > +@example
> >> > +ffplay -i example.mov -vf colorrgbstats=planes=0xf,metadata=mode=print
> >> > +@end example
> >> > +
> >> > +@end itemize
> >> > +
> >> >  @section convolution
> >> >
> >> >  Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49
> >> > elements.
> >> > diff --git a/libavfilter/Makefile b/libavfilter/Makefile
> >> > index 37d4eee858..a007bd32d1 100644
> >> > --- a/libavfilter/Makefile
> >> > +++ b/libavfilter/Makefile
> >> > @@ -182,6 +182,7 @@ OBJS-$(CONFIG_CIESCOPE_FILTER)               +=
> >> > vf_ciescope.o
> >> >  OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
> >> >  OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
> >> >  OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      += vf_colorchannelmixer.o
> >> > +OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
> >> >  OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
> >> >  OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o
> >> > opencl.o \
> >> >                                                  opencl/colorkey.o
> >> > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
> >> > index c295f8e403..6b84a45452 100644
> >> > --- a/libavfilter/allfilters.c
> >> > +++ b/libavfilter/allfilters.c
> >> > @@ -172,6 +172,9 @@ extern AVFilter ff_vf_ciescope;
> >> >  extern AVFilter ff_vf_codecview;
> >> >  extern AVFilter ff_vf_colorbalance;
> >> >  extern AVFilter ff_vf_colorchannelmixer;
> >> > +extern AVFilter ff_vf_colorstats;
> >> > +extern AVFilter ff_vf_colorrgbstats;
> >> > +extern AVFilter ff_vf_coloryuvstats;
> >> >  extern AVFilter ff_vf_colorkey;
> >> >  extern AVFilter ff_vf_colorkey_opencl;
> >> >  extern AVFilter ff_vf_colorhold;
> >> > diff --git a/libavfilter/vf_colorstats.c b/libavfilter/vf_colorstats.c
> >> > new file mode 100644
> >> > index 0000000000..7e94c572f9
> >> > --- /dev/null
> >> > +++ b/libavfilter/vf_colorstats.c
> >> > @@ -0,0 +1,461 @@
> >> > +/*
> >> > + * This file is part of FFmpeg.
> >> > + *
> >> > + * FFmpeg is free software; you can redistribute it and/or
> >> > + * modify it under the terms of the GNU Lesser General Public
> >> > + * License as published by the Free Software Foundation; either
> >> > + * version 2.1 of the License, or (at your option) any later version.
> >> > + *
> >> > + * FFmpeg is distributed in the hope that it will be useful,
> >> > + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> >> > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> >> > + * Lesser General Public License for more details.
> >> > + *
> >> > + * You should have received a copy of the GNU Lesser General Public
> >> > + * License along with FFmpeg; if not, write to the Free Software
> >> > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
> >> > 02110-1301
> >> > USA
> >> > + */
> >> > +
> >> > +#include "libavutil/imgutils.h"
> >> > +#include "libavutil/opt.h"
> >> > +#include "libavutil/pixdesc.h"
> >> > +
> >> > +#include "avfilter.h"
> >> > +#include "drawutils.h"
> >> > +#include "filters.h"
> >> > +
> >> > +#define R 0
> >> > +#define G 1
> >> > +#define B 2
> >> > +#define A 3
> >> > +
> >> > +typedef struct ThreadData {
> >> > +    AVFrame *in;
> >> > +} ThreadData;
> >> > +
> >> > +typedef struct ColorStatsContext {
> >> > +    const AVClass *class;
> >> > +
> >> > +    ptrdiff_t width[4];
> >> > +    ptrdiff_t height[4];
> >> > +
> >> > +    int planes;
> >> > +    int step;
> >> > +    int bitdepth;
> >> > +    int nb_components;
> >> > +    int thread_count;
> >> > +    int is_16bit;
> >> > +    int is_rgb;
> >> > +    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
> >> > +    uint8_t rgba_map[4];
> >> > +    char comps[4];
> >> > +
> >> > +    double *mean[4];
> >> > +    double *stdev[4];
> >> > +
> >> > +    int max;
> >> > +    double *min_value[4];
> >> > +    double *max_value[4];
> >> > +
> >> > +    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr, int
> >> > nb_jobs);
> >> > +} ColorStatsContext;
> >> > +
> >> > +#define OFFSET(x) offsetof(ColorStatsContext, x)
> >> > +#define V AV_OPT_FLAG_VIDEO_PARAM
> >> > +#define F AV_OPT_FLAG_FILTERING_PARAM
> >> > +
> >> > +static const AVOption options[] = {
> >> > +    { "planes", "set planes to filter", OFFSET(planes),
> >> > AV_OPT_TYPE_INT,
> >> > {.i64=1}, 1, 0xf, V|F},
> >> > +    {NULL}
> >> > +};
> >> > +
> >> > +#define YUV_FORMATS                                                 \
> >> > +    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,   \
> >> > +    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,   \
> >> > +    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,                       \
> >> > +    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,                       \
> >> > +    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,                       \
> >> > +    AV_PIX_FMT_YUV420P9,                                            \
> >> > +    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,                     \
> >> > +    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,                     \
> >> > +    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,                     \
> >> > +    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,                     \
> >> > +    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,                     \
> >> > +    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,                     \
> >> > +    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,                     \
> >> > +    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,                      \
> >> > +    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,                    \
> >> > +    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,                    \
> >> > +    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,                      \
> >> > +    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,    \
> >> > +    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
> >> > +
> >> > +#define RGB_FORMATS                                                 \
> >> > +    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,                          \
> >> > +    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,                          \
> >> > +    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,      \
> >> > +    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,     \
> >> > +    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,      \
> >> > +    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,    \
> >> > +    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,    \
> >> > +    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
> >> > +
> >> > +static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS,
> >> > AV_PIX_FMT_NONE };
> >> > +static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS,
> >> > AV_PIX_FMT_NONE };
> >> > +static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS,
> >> > YUV_FORMATS, AV_PIX_FMT_NONE };
> >> > +
> >> > +static int query_formats(AVFilterContext *ctx)
> >> > +{
> >> > +    const ColorStatsContext *s = ctx->priv;
> >> > +    const enum AVPixelFormat *pix_fmts = s->force_fmt == 1 ?
> >> > rgb_pix_fmts :
> >> > +                                         s->force_fmt == 2 ?
> >> > yuv_pix_fmts :
> >> > +                                         all_pix_fmts;
> >> > +
> >> > +    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
> >> > +    if (!fmts_list)
> >> > +        return AVERROR(ENOMEM);
> >> > +    return ff_set_common_formats(ctx, fmts_list);
> >> > +}
> >> > +
> >> > +#define DECLARE_STATS_PLANAR_FUNC(nbits, div)
> >> >                       \
> >> > +static int stats_slice_planar_##nbits(AVFilterContext *ctx, void *arg,
> >> > int
> >> > jobnr, int nb_jobs)    \
> >> > +{
> >> >                       \
> >> > +    const ColorStatsContext *s = ctx->priv;
> >> >                       \
> >> > +    ThreadData *td = arg;
> >> >                       \
> >> > +    const AVFrame *in = td->in;
> >> >                       \
> >> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
> >> >                       \
> >> > +    int32_t count[4] = { 0 };
> >> >                       \
> >> > +    double min_value[4] = { s->max };
> >> >                       \
> >> > +    double max_value[4] = { 0 };
> >> >                       \
> >> > +
> >> >                       \
> >> > +    for (int i = 0; i < s->nb_components; i++) {
> >> >                       \
> >> > +        const int width = s->width[i];
> >> >                       \
> >> > +        const int height = s->height[i];
> >> >                       \
> >> > +        const int slice_start = (height *  jobnr     ) / nb_jobs;
> >> >                       \
> >> > +        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
> >> >                       \
> >> > +        int linesize = in->linesize[i] / div;
> >> >                       \
> >> > +        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] +
> >> > slice_start
> >> > * linesize;            \
> >> > +
> >> >                       \
> >> > +        if (!(s->planes & (1 << i)))
> >> >                       \
> >> > +            continue;
> >> >                       \
> >> > +        for (int j = slice_start; j < slice_end; j++) {
> >> >                       \
> >> > +            for (int x = 0; x < width; x++) {
> >> >                       \
> >> > +                sum[i] += src[x];
> >> >                       \
> >> > +                sum2[i] += src[x] * src[x];
> >> >                       \
> >> > +                if (src[i] > max_value[i]) max_value[i] = src[i];
> >> >                       \
> >> > +                if (src[i] < min_value[i]) min_value[i] = src[i];
> >> >                       \
> >> > +            }
> >> >                       \
> >> > +            count[i] += width;
> >> >                       \
> >> > +            src += linesize;
> >> >                       \
> >> > +        }
> >> >                       \
> >> > +
> >> >                       \
> >> > +        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) / count[i];
> >> >                       \
> >> > +        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i] /
> >> > count[i]) / count[i]);     \
> >> > +        s->min_value[i][jobnr] = min_value[i];
> >> >                       \
> >> > +        s->max_value[i][jobnr] = max_value[i];
> >> >                       \
> >> > +    }
> >> >                       \
> >> > +
> >> >                       \
> >> > +    return 0;
> >> >                       \
> >> > +}
> >> > +DECLARE_STATS_PLANAR_FUNC(8, 1)
> >> > +DECLARE_STATS_PLANAR_FUNC(16, 2)
> >> > +
> >> > +#define DECLARE_STATS_PACKED_FUNC(nbits, div)
> >> >                       \
> >> > +static int stats_slice_packed_##nbits(AVFilterContext *ctx, void *arg,
> >> > int
> >> > jobnr, int nb_jobs)    \
> >> > +{
> >> >                       \
> >> > +    const ColorStatsContext *s = ctx->priv;
> >> >                       \
> >> > +    ThreadData *td = arg;
> >> >                       \
> >> > +    const AVFrame *in = td->in;
> >> >                       \
> >> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
> >> >                       \
> >> > +    double min_value[4] = { s->max };
> >> >                       \
> >> > +    double max_value[4] = { 0 };
> >> >                       \
> >> > +    int32_t count[4] = { 0 };
> >> >                       \
> >> > +    const int width = in->width;
> >> >                       \
> >> > +    const int height = in->height;
> >> >                       \
> >> > +    const int slice_start = (height *  jobnr     ) / nb_jobs;
> >> >                       \
> >> > +    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
> >> >                       \
> >> > +    int linesize = in->linesize[0] / div;
> >> >                       \
> >> > +    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] + slice_start
> >> > *
> >> > linesize;                \
> >> > +    const uint8_t ro = s->rgba_map[R];
> >> >                       \
> >> > +    const uint8_t go = s->rgba_map[G];
> >> >                       \
> >> > +    const uint8_t bo = s->rgba_map[B];
> >> >                       \
> >> > +    const uint8_t ao = s->rgba_map[A];
> >> >                       \
> >> > +
> >> >                       \
> >> > +    for (int y = slice_start; y < slice_end; y++) {
> >> >                       \
> >> > +        for (int x = 0; x < width * s->step; x += s->step) {
> >> >                       \
> >> > +            const int r = src[x + ro];
> >> >                       \
> >> > +            const int g = src[x + go];
> >> >                       \
> >> > +            const int b = src[x + bo];
> >> >                       \
> >> > +            const int a = src[x + ao];
> >> >                       \
> >> > +
> >> >                       \
> >> > +            sum[ro] += r;
> >> >                       \
> >> > +            sum[go] += g;
> >> >                       \
> >> > +            sum[bo] += b;
> >> >                       \
> >> > +            sum2[ro] += r * r;
> >> >                       \
> >> > +            sum2[go] += g * g;
> >> >                       \
> >> > +            sum2[bo] += b * b;
> >> >                       \
> >> > +
> >> >                       \
> >> > +            if (r > max_value[ro]) max_value[ro] = r;
> >> >                       \
> >> > +            if (r < min_value[ro]) min_value[ro] = r;
> >> >                       \
> >> > +            if (g > max_value[go]) max_value[go] = g;
> >> >                       \
> >> > +            if (g < min_value[go]) min_value[go] = g;
> >> >                       \
> >> > +            if (b > max_value[bo]) max_value[bo] = b;
> >> >                       \
> >> > +            if (b < min_value[bo]) min_value[bo] = b;
> >> >                       \
> >> > +            if (s->step == 4) {
> >> >                       \
> >> > +                sum2[ao] += a * a;
> >> >                       \
> >> > +                sum[ao] += a;
> >> >                       \
> >> > +                if (a > max_value[ao]) max_value[ao] = a;
> >> >                       \
> >> > +                if (a < min_value[ao]) min_value[ao] = a;
> >> >                       \
> >> > +            }
> >> >                       \
> >> > +        }
> >> >                       \
> >> > +        count[ro] += width;
> >> >                       \
> >> > +        count[go] += width;
> >> >                       \
> >> > +        count[bo] += width;
> >> >                       \
> >> > +        if (s->step == 4)
> >> >                       \
> >> > +            count[ao] += width;
> >> >                       \
> >> > +        src += linesize;
> >> >                       \
> >> > +    }
> >> >                       \
> >> > +
> >> >                       \
> >> > +    for (int p = 0; p < s->nb_components; p++) {
> >> >                       \
> >> > +        int ci = s->is_rgb ? s->rgba_map[p] : p;
> >> >                       \
> >> > +        double variance;
> >> >                       \
> >> > +
> >> >                       \
> >> > +        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) /
> >> > count[ci];
> >> >                       \
> >> > +        variance = (sum2[ci] - sum[ci] * (double)sum[ci] / count[ci]) /
> >> > count[ci];                \
> >> > +        s->stdev[ci][jobnr] = sqrt(variance);
> >> >                       \
> >> > +        s->min_value[ci][jobnr] = min_value[ci];
> >> >                       \
> >> > +        s->max_value[ci][jobnr] = max_value[ci];
> >> >                       \
> >> > +    }
> >> >                       \
> >> > +
> >> >                       \
> >> > +    return 0;
> >> >                       \
> >> > +}
> >> > +DECLARE_STATS_PACKED_FUNC(8, 1)
> >> > +DECLARE_STATS_PACKED_FUNC(16, 2)
> >> > +
> >> > +static av_cold void uninit(AVFilterContext *ctx)
> >> > +{
> >> > +    ColorStatsContext *s = ctx->priv;
> >> > +
> >> > +    for (int i = 0; i < s->nb_components; i++) {
> >> > +        av_freep(&s->mean[i]);
> >> > +        av_freep(&s->stdev[i]);
> >> > +        av_freep(&s->min_value[i]);
> >> > +        av_freep(&s->max_value[i]);
> >> > +    }
> >> > +}
> >> > +
> >> > +static int config_input(AVFilterLink *inlink)
> >> > +{
> >> > +    AVFilterContext *ctx = inlink->dst;
> >> > +    ColorStatsContext *s = ctx->priv;
> >> > +    const AVPixFmtDescriptor *desc =
> >> > av_pix_fmt_desc_get(inlink->format);
> >> > +
> >> > +    s->nb_components = desc->nb_components;
> >> > +    s->bitdepth = desc->comp[0].depth;
> >> > +    s->is_16bit = s->bitdepth > 8;
> >> > +    s->step = av_get_padded_bits_per_pixel(desc) >> (3 + s->is_16bit);
> >> > +    s->max  = 1 << s->bitdepth - 1;
> >> > +
> >> > +    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
> >> > +    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
> >> > +    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
> >> > +    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
> >> > +    s->comps[3] = 'a';
> >> > +
> >> > +    s->thread_count = FFMAX(1, FFMIN(inlink->h,
> >> > ff_filter_get_nb_threads(ctx)));
> >> > +    for (int i = 0; i < s->nb_components; i++) {
> >> > +        ptrdiff_t line_size = av_image_get_linesize(inlink->format,
> >> > inlink->w, i);
> >> > +
> >> > +        s->width[i] = line_size >> (s->bitdepth > 8);
> >> > +        s->height[i] = inlink->h >> ((i == 1 || i == 2) ?
> >> > desc->log2_chroma_h : 0);
> >> > +
> >> > +        s->mean[i] = av_mallocz_array(s->thread_count,
> >> > sizeof(*s->mean[i]));
> >> > +        s->stdev[i] = av_mallocz_array(s->thread_count,
> >> > sizeof(*s->stdev[i]));
> >> > +        s->max_value[i] = av_mallocz_array(s->thread_count,
> >> > sizeof(*s->max_value[i]));
> >> > +        s->min_value[i] = av_mallocz_array(s->thread_count,
> >> > sizeof(*s->min_value[i]));
> >> > +        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] ||
> >> > !s->min_value[i])
> >> > +            return AVERROR(ENOMEM);
> >> > +        for (int j = 0; j < s->thread_count; j++) {
> >> > +            s->min_value[i][j] = (1 << s->bitdepth);
> >> > +            s->max_value[i][j] = 0;
> >> > +        }
> >> > +    }
> >> > +
> >> > +    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
> >> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 :
> >> > stats_slice_planar_16;
> >> > +    else
> >> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 :
> >> > stats_slice_packed_16;
> >> > +
> >> > +    return 0;
> >> > +}
> >> > +
> >> > +static void set_meta_float(AVDictionary **metadata, const char *key,
> >> > char
> >> > c, float d)
> >> > +{
> >> > +    char value[128];
> >> > +    char key2[128];
> >> > +
> >> > +    snprintf(value, sizeof(value), "%.2f", d);
> >> > +    if (c)
> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
> >> > +    else
> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
> >> > +    av_dict_set(metadata, key2, value, 0);
> >> > +}
> >> > +
> >> > +static void set_meta_int(AVDictionary **metadata, const char *key, char
> >> > c,
> >> > int d)
> >> > +{
> >> > +    char value[128];
> >> > +    char key2[128];
> >> > +
> >> > +    snprintf(value, sizeof(value), "%d", d);
> >> > +    if (c)
> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key, c);
> >> > +    else
> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
> >> > +    av_dict_set(metadata, key2, value, 0);
> >> > +}
> >> > +
> >> > +static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
> >> > +{
> >> > +    const ColorStatsContext *s = ctx->priv;
> >> > +    double mean[4] = { 0 };
> >> > +    double stdev[4] = { 0 };
> >> > +    double min_value[4] = { s->max };
> >> > +    double max_value[4] = { 0 };
> >> > +    int cidx;
> >> > +
> >> > +    for (int p = 0; p < s->nb_components; p++) {
> >> > +        cidx = s->is_rgb ? s->rgba_map[p] : p;
> >> > +
> >> > +        if (!(s->planes & (1 << p)))
> >> > +            continue;
> >> > +
> >> > +        for (int j = 0; j < s->thread_count; j++) {
> >> > +            mean[cidx] += s->mean[cidx][j];
> >> > +            stdev[cidx] += s->stdev[cidx][j];
> >> > +            if (s->min_value[cidx][j] < min_value[cidx])
> >> > +                min_value[cidx] = s->min_value[cidx][j];
> >> > +            if (s->max_value[cidx][j] > max_value[cidx])
> >> > +                max_value[cidx] = s->max_value[cidx][j];
> >> > +        }
> >> > +        mean[cidx] = mean[cidx] / s->thread_count;
> >> > +        stdev[cidx] = stdev[cidx] / s->thread_count;
> >> > +
> >> > +        set_meta_int(&in->metadata, "min", s->comps[p],
> >> > min_value[cidx]);
> >> > +        set_meta_int(&in->metadata, "max", s->comps[p],
> >> > max_value[cidx]);
> >> > +        set_meta_int(&in->metadata, "mean", s->comps[p], mean[cidx]);
> >> > +        set_meta_int(&in->metadata, "stdev", s->comps[p], stdev[cidx]);
> >> > +
> >> > +        set_meta_float(&in->metadata, "pmin", s->comps[p],
> >> > min_value[cidx]
> >> > / s->max);
> >> > +        set_meta_float(&in->metadata, "pmax", s->comps[p],
> >> > max_value[cidx]
> >> > / s->max);
> >> > +        set_meta_float(&in->metadata, "pmean", s->comps[p], mean[cidx]
> >> > /
> >> > s->max);
> >> > +        set_meta_float(&in->metadata, "pstdev", s->comps[p],
> >> > stdev[cidx] /
> >> > s->max);
> >> > +    }
> >> > +}
> >> > +
> >> > +static int activate(AVFilterContext *ctx)
> >> > +{
> >> > +    int ret;
> >> > +    AVFilterLink *inlink = ctx->inputs[0];
> >> > +    AVFilterLink *outlink = ctx->outputs[0];
> >> > +    ColorStatsContext *s = ctx->priv;
> >> > +    AVFrame *in;
> >> > +    ThreadData td;
> >> > +
> >> > +    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
> >> > +
> >> > +    ret = ff_inlink_consume_frame(inlink, &in);
> >> > +    if (ret < 0)
> >> > +        return ret;
> >> > +
> >> > +    if (in) {
> >> > +        td.in = in;
> >> > +        ctx->internal->execute(ctx, s->stats_slice, &td, NULL,
> >> > s->thread_count);
> >> > +
> >> > +        report_detect_result(ctx, in);
> >> > +        return ff_filter_frame(outlink, in);
> >> > +    }
> >> > +
> >> > +    FF_FILTER_FORWARD_STATUS(inlink, outlink);
> >> > +    FF_FILTER_FORWARD_WANTED(outlink, inlink);
> >> > +
> >> > +    return FFERROR_NOT_READY;
> >> > +}
> >> > +
> >> > +static const AVFilterPad inputs[] = {
> >> > +    {
> >> > +        .name         = "default",
> >> > +        .type         = AVMEDIA_TYPE_VIDEO,
> >> > +        .config_props = config_input,
> >> > +    },
> >> > +    { NULL }
> >> > +};
> >> > +
> >> > +static const AVFilterPad outputs[] = {
> >> > +    {
> >> > +        .name          = "default",
> >> > +        .type          = AVMEDIA_TYPE_VIDEO,
> >> > +    },
> >> > +    { NULL }
> >> > +};
> >> > +
> >> > +#define DEFINE_COLOR_FILTER(name_, description_)
> >> > \
> >> > +    AVFilter ff_vf_##name_ = {
> >> > \
> >> > +        .name          = #name_,
> >> > \
> >> > +        .description   = NULL_IF_CONFIG_SMALL(description_),
> >> > \
> >> > +        .priv_size     = sizeof(ColorStatsContext),
> >> > \
> >> > +        .priv_class    = &name_ ## _class,
> >> > \
> >> > +        .init          = name_##_init,
> >> > \
> >> > +        .uninit        = uninit,
> >> > \
> >> > +        .query_formats = query_formats,
> >> > \
> >> > +        .inputs        = inputs,
> >> > \
> >> > +        .outputs       = outputs,
> >> > \
> >> > +        .activate      = activate,
> >> > \
> >> > +        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |
> >> > \
> >> > +                         AVFILTER_FLAG_SLICE_THREADS,
> >> > \
> >> > +    }
> >> > +
> >> > +#if CONFIG_COLORSTATS_FILTER
> >> > +
> >> > +#define colorstats_options options
> >> > +AVFILTER_DEFINE_CLASS(colorstats);
> >> > +
> >> > +static int colorstats_init(AVFilterContext *ctx)
> >> > +{
> >> > +    return 0;
> >> > +}
> >> > +
> >> > +DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
> >> > +#endif
> >> > +
> >> > +#if CONFIG_COLORRGBSTATS_FILTER
> >> > +
> >> > +#define colorrgbstats_options options
> >> > +AVFILTER_DEFINE_CLASS(colorrgbstats);
> >> > +
> >> > +static int colorrgbstats_init(AVFilterContext *ctx)
> >> > +{
> >> > +    ColorStatsContext *s = ctx->priv;
> >> > +
> >> > +    s->force_fmt = 1;
> >> > +    return 0;
> >> > +}
> >> > +
> >> > +DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
> >> > +#endif
> >> > +
> >> > +#if CONFIG_COLORYUVSTATS_FILTER
> >> > +
> >> > +#define coloryuvstats_options options
> >> > +AVFILTER_DEFINE_CLASS(coloryuvstats);
> >> > +
> >> > +static int coloryuvstats_init(AVFilterContext *ctx)
> >> > +{
> >> > +    ColorStatsContext *s = ctx->priv;
> >> > +
> >> > +    s->force_fmt = 2;
> >> > +    return 0;
> >> > +}
> >> > +
> >> > +DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
> >> > +#endif
> >> > --
> >> > 2.21.0
> >> >
> >> > _______________________________________________
> >> > ffmpeg-devel mailing list
> >> > ffmpeg-devel@ffmpeg.org
> >> > https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
> >> >
> >> > To unsubscribe, visit link above, or email
> >> > ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
> >
> > --
> > Thanks,
> > Limin Wang
> > _______________________________________________
> > ffmpeg-devel mailing list
> > ffmpeg-devel@ffmpeg.org
> > https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
> >
> > To unsubscribe, visit link above, or email
> > ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
> 
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Paul B Mahol Dec. 27, 2019, 3:49 p.m. UTC | #5
That is because signalstats is doing more stuff.

On 12/27/19, Limin Wang <lance.lmwang@gmail.com> wrote:
> On Fri, Dec 27, 2019 at 03:20:19PM +0100, Paul B Mahol wrote:
>> On 12/27/19, Limin Wang <lance.lmwang@gmail.com> wrote:
>> > On Fri, Dec 27, 2019 at 12:35:25PM +0100, Paul B Mahol wrote:
>> >> You are duplicating some functionality of signalstats filter.
>> >>
> >> > Yes, I have another function that needs to use the mean and stdev, which
> >> > is supported in the showinfo filter (8-bit only, no packed-format
> >> > support, no multi-threading), and signalstats doesn't support RGB
> >> > formats and doesn't provide stdev; it also has too many other functions
> >> > and is difficult to change, so I think it's simpler to create a new filter.
>> >
>>
>> No, unacceptable. use signalstats filter.
>
> Performance is also one major reason; below are the profiling
> results:
>
> ./ffmpeg -nostats -f lavfi -i testsrc2=4k:d=2 -vf
> bench=start,signalstats,bench=stop -f null -
> [bench @ 0x3fb9080] t:0.161589 avg:0.165756 max:0.169923 min:0.161589
> [bench @ 0x3fb9080] t:0.160334 avg:0.163948 max:0.169923 min:0.160334
> [bench @ 0x3fb9080] t:0.160345 avg:0.163047 max:0.169923 min:0.160334
> [bench @ 0x3fb9080] t:0.160924 avg:0.162623 max:0.169923 min:0.160334
> [bench @ 0x3fb9080] t:0.160318 avg:0.162238 max:0.169923 min:0.160318
>
> ./ffmpeg -nostats -f lavfi -i testsrc2=4k:d=2 -vf
> bench=start,colorstats,bench=stop -f null -
> [bench @ 0x26f6100] t:0.012596 avg:0.012612 max:0.012628 min:0.012596
> [bench @ 0x26f6100] t:0.012542 avg:0.012588 max:0.012628 min:0.012542
> [bench @ 0x26f6100] t:0.012529 avg:0.012573 max:0.012628 min:0.012529
> [bench @ 0x26f6100] t:0.012532 avg:0.012565 max:0.012628 min:0.012529
> [bench @ 0x26f6100] t:0.012527 avg:0.012559 max:0.012628 min:0.012527
> [bench @ 0x26f6100] t:0.012525 avg:0.012554 max:0.012628 min:0.012525
> [bench @ 0x26f6100] t:0.012522 avg:0.012550 max:0.012628 min:0.012522
> [bench @ 0x26f6100] t:0.012552 avg:0.012550 max:0.012628 min:0.012522
>
>
>>
>> >
>> >> On 12/27/19, lance.lmwang@gmail.com <lance.lmwang@gmail.com> wrote:
>> >> > From: Limin Wang <lance.lmwang@gmail.com>
>> >> >
>> >> > Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
>> >> > ---
>> >> >  doc/filters.texi            |  74 ++++++
>> >> >  libavfilter/Makefile        |   1 +
>> >> >  libavfilter/allfilters.c    |   3 +
>> >> >  libavfilter/vf_colorstats.c | 461
>> >> > ++++++++++++++++++++++++++++++++++++
>> >> >  4 files changed, 539 insertions(+)
>> >> >  create mode 100644 libavfilter/vf_colorstats.c
>> >> >
>> >> > diff --git a/doc/filters.texi b/doc/filters.texi
>> >> > index 8c5d3a5760..81968b2c17 100644
>> >> > --- a/doc/filters.texi
>> >> > +++ b/doc/filters.texi
>> >> > @@ -7695,6 +7695,80 @@ For example to convert the input to
>> >> > SMPTE-240M,
>> >> > use
>> >> > the command:
>> >> >  colorspace=smpte240m
>> >> >  @end example
>> >> >
>> >> > +@section colorstats, colorrgbstats, coloryuvstats
>> >> > +The filter provides statistical video measurements such as mean,
>> >> > minimum,
>> >> > maximum and
>> >> > +standard deviation for each frame. The user can check for
>> >> > unexpected/accidental errors
>> >> > +very quickly with them.
>> >> > +
>> >> > +@var{colorrgbstats} report the color stats for RGB input video,
>> >> > @var{coloryuvstats}
>> >> > +to an YUV input video.
>> >> > +
>> >> > +These filters accept the following parameters:
>> >> > +@table @option
>> >> > +@item planes
>> >> > +Set which planes to filter. Default is only the first plane.
>> >> > +@end table
>> >> > +
>> >> > +By default the filter will report these metadata values if the
>> >> > planes
>> >> > +are processed:
>> >> > +
>> >> > +@table @option
>> >> > +@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
>> >> > +Display the minimal Y/U/V/R/G/B/A plane value contained within the
>> >> > input
>> >> > frame.
>> >> > +Expressed in range of [0, 1<<bitdepth-1]
>> >> > +
>> >> > +@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, min.a
>> >> > +Display the minimal Y/U/V/R/G/B/A plane percentage of maximum
>> >> > contained
>> >> > within
>> >> > +the input frame. Expressed in range of [0, 1]
>> >> > +
>> >> > +@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
>> >> > +Display the maximum Y/U/V/R/G/B/A plane value contained within the
>> >> > input
>> >> > frame.
>> >> > +Expressed in range of [0, 1<<bitdepth-1]
>> >> > +
>> >> > +@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
>> >> > +Display the maximum Y/U/V/R/G/B/A plane percentage of maximum
>> >> > contained
>> >> > within
>> >> > +the input frame. Expressed in range of [0, 1]
>> >> > +
>> >> > +@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
>> >> > +Display the Y/U/V/R/G/B/A plane mean value contained within the
>> >> > input
>> >> > frame.
>> >> > +Expressed in range of [0, 1<<bitdepth-1]
>> >> > +
>> >> > +@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
>> >> > +Display the Y/U/V/R/G/B/A plane mean value percentage of maximum
>> >> > contained
>> >> > within
>> >> > +the input frame. Expressed in range of [0, 1]
>> >> > +
>> >> > +@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
>> >> > +Display the Y/U/V/R/G/B/A plane standard deviation value contained
>> >> > within
>> >> > the
>> >> > +input frame. Expressed in range of [0, 1<<bitdepth-1]
>> >> > +
>> >> > +@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b,
>> >> > pstdev.a
>> >> > +Display the Y/U/V/R/G/B/A plane standard deviation value percentage
>> >> > of
>> >> > maximum contained
>> >> > +within the input frame. Expressed in range of [0, 1]
>> >> > +@end table
>> >> > +
>> >> > +@subsection Examples
>> >> > +
>> >> > +@itemize
>> >> > +@item
>> >> > +Show all YUV color stats for each frame:
>> >> > +@example
>> >> > +ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf
>> >> > -show_frames
>> >> > +@end example
>> >> > +
>> >> > +@item
>> >> > +Draw graph for the pmean and pstdev value of the Y plane per frame:
>> >> > +@example
>> >> > +ffplay -i example.mov -vf
>> >> > coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
>> >> > +@end example
>> >> > +
>> >> > +@item
>> >> > +Print all RGB color stats for each frame:
>> >> > +@example
>> >> > +ffplay -i example.mov -vf
>> >> > colorrgbstats=planes=0xf,metadata=mode=print
>> >> > +@end example
>> >> > +
>> >> > +@end itemize
>> >> > +
>> >> >  @section convolution
>> >> >
>> >> >  Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49
>> >> > elements.
>> >> > diff --git a/libavfilter/Makefile b/libavfilter/Makefile
>> >> > index 37d4eee858..a007bd32d1 100644
>> >> > --- a/libavfilter/Makefile
>> >> > +++ b/libavfilter/Makefile
>> >> > @@ -182,6 +182,7 @@ OBJS-$(CONFIG_CIESCOPE_FILTER)               +=
>> >> > vf_ciescope.o
>> >> >  OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
>> >> >  OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
>> >> >  OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      +=
>> >> > vf_colorchannelmixer.o
>> >> > +OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
>> >> >  OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
>> >> >  OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o
>> >> > opencl.o \
>> >> >                                                  opencl/colorkey.o
>> >> > diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
>> >> > index c295f8e403..6b84a45452 100644
>> >> > --- a/libavfilter/allfilters.c
>> >> > +++ b/libavfilter/allfilters.c
>> >> > @@ -172,6 +172,9 @@ extern AVFilter ff_vf_ciescope;
>> >> >  extern AVFilter ff_vf_codecview;
>> >> >  extern AVFilter ff_vf_colorbalance;
>> >> >  extern AVFilter ff_vf_colorchannelmixer;
>> >> > +extern AVFilter ff_vf_colorstats;
>> >> > +extern AVFilter ff_vf_colorrgbstats;
>> >> > +extern AVFilter ff_vf_coloryuvstats;
>> >> >  extern AVFilter ff_vf_colorkey;
>> >> >  extern AVFilter ff_vf_colorkey_opencl;
>> >> >  extern AVFilter ff_vf_colorhold;
>> >> > diff --git a/libavfilter/vf_colorstats.c
>> >> > b/libavfilter/vf_colorstats.c
>> >> > new file mode 100644
>> >> > index 0000000000..7e94c572f9
>> >> > --- /dev/null
>> >> > +++ b/libavfilter/vf_colorstats.c
>> >> > @@ -0,0 +1,461 @@
>> >> > +/*
>> >> > + * This file is part of FFmpeg.
>> >> > + *
>> >> > + * FFmpeg is free software; you can redistribute it and/or
>> >> > + * modify it under the terms of the GNU Lesser General Public
>> >> > + * License as published by the Free Software Foundation; either
>> >> > + * version 2.1 of the License, or (at your option) any later
>> >> > version.
>> >> > + *
>> >> > + * FFmpeg is distributed in the hope that it will be useful,
>> >> > + * but WITHOUT ANY WARRANTY; without even the implied warranty of
>> >> > + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
>> >> > + * Lesser General Public License for more details.
>> >> > + *
>> >> > + * You should have received a copy of the GNU Lesser General Public
>> >> > + * License along with FFmpeg; if not, write to the Free Software
>> >> > + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
>> >> > 02110-1301
>> >> > USA
>> >> > + */
>> >> > +
>> >> > +#include "libavutil/imgutils.h"
>> >> > +#include "libavutil/opt.h"
>> >> > +#include "libavutil/pixdesc.h"
>> >> > +
>> >> > +#include "avfilter.h"
>> >> > +#include "drawutils.h"
>> >> > +#include "filters.h"
>> >> > +
>> >> > +#define R 0
>> >> > +#define G 1
>> >> > +#define B 2
>> >> > +#define A 3
>> >> > +
>> >> > +typedef struct ThreadData {
>> >> > +    AVFrame *in;
>> >> > +} ThreadData;
>> >> > +
>> >> > +typedef struct ColorStatsContext {
>> >> > +    const AVClass *class;
>> >> > +
>> >> > +    ptrdiff_t width[4];
>> >> > +    ptrdiff_t height[4];
>> >> > +
>> >> > +    int planes;
>> >> > +    int step;
>> >> > +    int bitdepth;
>> >> > +    int nb_components;
>> >> > +    int thread_count;
>> >> > +    int is_16bit;
>> >> > +    int is_rgb;
>> >> > +    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
>> >> > +    uint8_t rgba_map[4];
>> >> > +    char comps[4];
>> >> > +
>> >> > +    double *mean[4];
>> >> > +    double *stdev[4];
>> >> > +
>> >> > +    int max;
>> >> > +    double *min_value[4];
>> >> > +    double *max_value[4];
>> >> > +
>> >> > +    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr,
>> >> > int
>> >> > nb_jobs);
>> >> > +} ColorStatsContext;
>> >> > +
>> >> > +#define OFFSET(x) offsetof(ColorStatsContext, x)
>> >> > +#define V AV_OPT_FLAG_VIDEO_PARAM
>> >> > +#define F AV_OPT_FLAG_FILTERING_PARAM
>> >> > +
>> >> > +static const AVOption options[] = {
>> >> > +    { "planes", "set planes to filter", OFFSET(planes),
>> >> > AV_OPT_TYPE_INT,
>> >> > {.i64=1}, 1, 0xf, V|F},
>> >> > +    {NULL}
>> >> > +};
>> >> > +
>> >> > +#define YUV_FORMATS
>> >> > \
>> >> > +    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,
>> >> > \
>> >> > +    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,
>> >> > \
>> >> > +    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV420P9,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,
>> >> > \
>> >> > +    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,
>> >> > \
>> >> > +    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,
>> >> > \
>> >> > +    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,
>> >> > \
>> >> > +    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,
>> >> > \
>> >> > +    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,
>> >> > \
>> >> > +    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,
>> >> > \
>> >> > +    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
>> >> > +
>> >> > +#define RGB_FORMATS
>> >> > \
>> >> > +    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,
>> >> > \
>> >> > +    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,
>> >> > \
>> >> > +    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,
>> >> > \
>> >> > +    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,
>> >> > \
>> >> > +    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,
>> >> > \
>> >> > +    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,
>> >> > \
>> >> > +    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,
>> >> > \
>> >> > +    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
>> >> > +
>> >> > +static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS,
>> >> > AV_PIX_FMT_NONE };
>> >> > +static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS,
>> >> > AV_PIX_FMT_NONE };
>> >> > +static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS,
>> >> > YUV_FORMATS, AV_PIX_FMT_NONE };
>> >> > +
>> >> > +static int query_formats(AVFilterContext *ctx)
>> >> > +{
>> >> > +    const ColorStatsContext *s = ctx->priv;
>> >> > +    const enum AVPixelFormat *pix_fmts = s->force_fmt == 1 ?
>> >> > rgb_pix_fmts :
>> >> > +                                         s->force_fmt == 2 ?
>> >> > yuv_pix_fmts :
>> >> > +                                         all_pix_fmts;
>> >> > +
>> >> > +    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
>> >> > +    if (!fmts_list)
>> >> > +        return AVERROR(ENOMEM);
>> >> > +    return ff_set_common_formats(ctx, fmts_list);
>> >> > +}
>> >> > +
>> >> > +#define DECLARE_STATS_PLANAR_FUNC(nbits, div)
>> >> >                       \
>> >> > +static int stats_slice_planar_##nbits(AVFilterContext *ctx, void
>> >> > *arg,
>> >> > int
>> >> > jobnr, int nb_jobs)    \
>> >> > +{
>> >> >                       \
>> >> > +    const ColorStatsContext *s = ctx->priv;
>> >> >                       \
>> >> > +    ThreadData *td = arg;
>> >> >                       \
>> >> > +    const AVFrame *in = td->in;
>> >> >                       \
>> >> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>> >> >                       \
>> >> > +    int32_t count[4] = { 0 };
>> >> >                       \
>> >> > +    double min_value[4] = { s->max };
>> >> >                       \
>> >> > +    double max_value[4] = { 0 };
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +    for (int i = 0; i < s->nb_components; i++) {
>> >> >                       \
>> >> > +        const int width = s->width[i];
>> >> >                       \
>> >> > +        const int height = s->height[i];
>> >> >                       \
>> >> > +        const int slice_start = (height *  jobnr     ) / nb_jobs;
>> >> >                       \
>> >> > +        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>> >> >                       \
>> >> > +        int linesize = in->linesize[i] / div;
>> >> >                       \
>> >> > +        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] +
>> >> > slice_start
>> >> > * linesize;            \
>> >> > +
>> >> >                       \
>> >> > +        if (!(s->planes & (1 << i)))
>> >> >                       \
>> >> > +            continue;
>> >> >                       \
>> >> > +        for (int j = slice_start; j < slice_end; j++) {
>> >> >                       \
>> >> > +            for (int x = 0; x < width; x++) {
>> >> >                       \
>> >> > +                sum[i] += src[x];
>> >> >                       \
>> >> > +                sum2[i] += src[x] * src[x];
>> >> >                       \
>> >> > +                if (src[i] > max_value[i]) max_value[i] = src[i];
>> >> >                       \
>> >> > +                if (src[i] < min_value[i]) min_value[i] = src[i];
>> >> >                       \
>> >> > +            }
>> >> >                       \
>> >> > +            count[i] += width;
>> >> >                       \
>> >> > +            src += linesize;
>> >> >                       \
>> >> > +        }
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) /
>> >> > count[i];
>> >> >                       \
>> >> > +        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i]
>> >> > /
>> >> > count[i]) / count[i]);     \
>> >> > +        s->min_value[i][jobnr] = min_value[i];
>> >> >                       \
>> >> > +        s->max_value[i][jobnr] = max_value[i];
>> >> >                       \
>> >> > +    }
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +    return 0;
>> >> >                       \
>> >> > +}
>> >> > +DECLARE_STATS_PLANAR_FUNC(8, 1)
>> >> > +DECLARE_STATS_PLANAR_FUNC(16, 2)
>> >> > +
>> >> > +#define DECLARE_STATS_PACKED_FUNC(nbits, div)
>> >> >                       \
>> >> > +static int stats_slice_packed_##nbits(AVFilterContext *ctx, void
>> >> > *arg,
>> >> > int
>> >> > jobnr, int nb_jobs)    \
>> >> > +{
>> >> >                       \
>> >> > +    const ColorStatsContext *s = ctx->priv;
>> >> >                       \
>> >> > +    ThreadData *td = arg;
>> >> >                       \
>> >> > +    const AVFrame *in = td->in;
>> >> >                       \
>> >> > +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>> >> >                       \
>> >> > +    double min_value[4] = { s->max };
>> >> >                       \
>> >> > +    double max_value[4] = { 0 };
>> >> >                       \
>> >> > +    int32_t count[4] = { 0 };
>> >> >                       \
>> >> > +    const int width = in->width;
>> >> >                       \
>> >> > +    const int height = in->height;
>> >> >                       \
>> >> > +    const int slice_start = (height *  jobnr     ) / nb_jobs;
>> >> >                       \
>> >> > +    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>> >> >                       \
>> >> > +    int linesize = in->linesize[0] / div;
>> >> >                       \
>> >> > +    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] +
>> >> > slice_start
>> >> > *
>> >> > linesize;                \
>> >> > +    const uint8_t ro = s->rgba_map[R];
>> >> >                       \
>> >> > +    const uint8_t go = s->rgba_map[G];
>> >> >                       \
>> >> > +    const uint8_t bo = s->rgba_map[B];
>> >> >                       \
>> >> > +    const uint8_t ao = s->rgba_map[A];
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +    for (int y = slice_start; y < slice_end; y++) {
>> >> >                       \
>> >> > +        for (int x = 0; x < width * s->step; x += s->step) {
>> >> >                       \
>> >> > +            const int r = src[x + ro];
>> >> >                       \
>> >> > +            const int g = src[x + go];
>> >> >                       \
>> >> > +            const int b = src[x + bo];
>> >> >                       \
>> >> > +            const int a = src[x + ao];
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +            sum[ro] += r;
>> >> >                       \
>> >> > +            sum[go] += g;
>> >> >                       \
>> >> > +            sum[bo] += b;
>> >> >                       \
>> >> > +            sum2[ro] += r * r;
>> >> >                       \
>> >> > +            sum2[go] += g * g;
>> >> >                       \
>> >> > +            sum2[bo] += b * b;
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +            if (r > max_value[ro]) max_value[ro] = r;
>> >> >                       \
>> >> > +            if (r < min_value[ro]) min_value[ro] = r;
>> >> >                       \
>> >> > +            if (g > max_value[go]) max_value[go] = g;
>> >> >                       \
>> >> > +            if (g < min_value[go]) min_value[go] = g;
>> >> >                       \
>> >> > +            if (b > max_value[bo]) max_value[bo] = b;
>> >> >                       \
>> >> > +            if (b < min_value[bo]) min_value[bo] = b;
>> >> >                       \
>> >> > +            if (s->step == 4) {
>> >> >                       \
>> >> > +                sum2[ao] += a * a;
>> >> >                       \
>> >> > +                sum[ao] += a;
>> >> >                       \
>> >> > +                if (a > max_value[ao]) max_value[ao] = a;
>> >> >                       \
>> >> > +                if (a < min_value[ao]) min_value[ao] = a;
>> >> >                       \
>> >> > +            }
>> >> >                       \
>> >> > +        }
>> >> >                       \
>> >> > +        count[ro] += width;
>> >> >                       \
>> >> > +        count[go] += width;
>> >> >                       \
>> >> > +        count[bo] += width;
>> >> >                       \
>> >> > +        if (s->step == 4)
>> >> >                       \
>> >> > +            count[ao] += width;
>> >> >                       \
>> >> > +        src += linesize;
>> >> >                       \
>> >> > +    }
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +    for (int p = 0; p < s->nb_components; p++) {
>> >> >                       \
>> >> > +        int ci = s->is_rgb ? s->rgba_map[p] : p;
>> >> >                       \
>> >> > +        double variance;
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) /
>> >> > count[ci];
>> >> >                       \
>> >> > +        variance = (sum2[ci] - sum[ci] * (double)sum[ci] /
>> >> > count[ci]) /
>> >> > count[ci];                \
>> >> > +        s->stdev[ci][jobnr] = sqrt(variance);
>> >> >                       \
>> >> > +        s->min_value[ci][jobnr] = min_value[ci];
>> >> >                       \
>> >> > +        s->max_value[ci][jobnr] = max_value[ci];
>> >> >                       \
>> >> > +    }
>> >> >                       \
>> >> > +
>> >> >                       \
>> >> > +    return 0;
>> >> >                       \
>> >> > +}
>> >> > +DECLARE_STATS_PACKED_FUNC(8, 1)
>> >> > +DECLARE_STATS_PACKED_FUNC(16, 2)
>> >> > +
>> >> > +static av_cold void uninit(AVFilterContext *ctx)
>> >> > +{
>> >> > +    ColorStatsContext *s = ctx->priv;
>> >> > +
>> >> > +    for (int i = 0; i < s->nb_components; i++) {
>> >> > +        av_freep(&s->mean[i]);
>> >> > +        av_freep(&s->stdev[i]);
>> >> > +        av_freep(&s->min_value[i]);
>> >> > +        av_freep(&s->max_value[i]);
>> >> > +    }
>> >> > +}
>> >> > +
>> >> > +static int config_input(AVFilterLink *inlink)
>> >> > +{
>> >> > +    AVFilterContext *ctx = inlink->dst;
>> >> > +    ColorStatsContext *s = ctx->priv;
>> >> > +    const AVPixFmtDescriptor *desc =
>> >> > av_pix_fmt_desc_get(inlink->format);
>> >> > +
>> >> > +    s->nb_components = desc->nb_components;
>> >> > +    s->bitdepth = desc->comp[0].depth;
>> >> > +    s->is_16bit = s->bitdepth > 8;
>> >> > +    s->step = av_get_padded_bits_per_pixel(desc) >> (3 +
>> >> > s->is_16bit);
>> >> > +    s->max  = 1 << s->bitdepth - 1;
>> >> > +
>> >> > +    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
>> >> > +    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
>> >> > +    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
>> >> > +    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
>> >> > +    s->comps[3] = 'a';
>> >> > +
>> >> > +    s->thread_count = FFMAX(1, FFMIN(inlink->h,
>> >> > ff_filter_get_nb_threads(ctx)));
>> >> > +    for (int i = 0; i < s->nb_components; i++) {
>> >> > +        ptrdiff_t line_size = av_image_get_linesize(inlink->format,
>> >> > inlink->w, i);
>> >> > +
>> >> > +        s->width[i] = line_size >> (s->bitdepth > 8);
>> >> > +        s->height[i] = inlink->h >> ((i == 1 || i == 2) ?
>> >> > desc->log2_chroma_h : 0);
>> >> > +
>> >> > +        s->mean[i] = av_mallocz_array(s->thread_count,
>> >> > sizeof(*s->mean[i]));
>> >> > +        s->stdev[i] = av_mallocz_array(s->thread_count,
>> >> > sizeof(*s->stdev[i]));
>> >> > +        s->max_value[i] = av_mallocz_array(s->thread_count,
>> >> > sizeof(*s->max_value[i]));
>> >> > +        s->min_value[i] = av_mallocz_array(s->thread_count,
>> >> > sizeof(*s->min_value[i]));
>> >> > +        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] ||
>> >> > !s->min_value[i])
>> >> > +            return AVERROR(ENOMEM);
>> >> > +        for (int j = 0; j < s->thread_count; j++) {
>> >> > +            s->min_value[i][j] = (1 << s->bitdepth);
>> >> > +            s->max_value[i][j] = 0;
>> >> > +        }
>> >> > +    }
>> >> > +
>> >> > +    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
>> >> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 :
>> >> > stats_slice_planar_16;
>> >> > +    else
>> >> > +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 :
>> >> > stats_slice_packed_16;
>> >> > +
>> >> > +    return 0;
>> >> > +}
>> >> > +
>> >> > +static void set_meta_float(AVDictionary **metadata, const char *key,
>> >> > char
>> >> > c, float d)
>> >> > +{
>> >> > +    char value[128];
>> >> > +    char key2[128];
>> >> > +
>> >> > +    snprintf(value, sizeof(value), "%.2f", d);
>> >> > +    if (c)
>> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key,
>> >> > c);
>> >> > +    else
>> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
>> >> > +    av_dict_set(metadata, key2, value, 0);
>> >> > +}
>> >> > +
>> >> > +static void set_meta_int(AVDictionary **metadata, const char *key,
>> >> > char
>> >> > c,
>> >> > int d)
>> >> > +{
>> >> > +    char value[128];
>> >> > +    char key2[128];
>> >> > +
>> >> > +    snprintf(value, sizeof(value), "%d", d);
>> >> > +    if (c)
>> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key,
>> >> > c);
>> >> > +    else
>> >> > +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
>> >> > +    av_dict_set(metadata, key2, value, 0);
>> >> > +}
>> >> > +
>> >> > +static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
>> >> > +{
>> >> > +    const ColorStatsContext *s = ctx->priv;
>> >> > +    double mean[4] = { 0 };
>> >> > +    double stdev[4] = { 0 };
>> >> > +    double min_value[4] = { s->max };
>> >> > +    double max_value[4] = { 0 };
>> >> > +    int cidx;
>> >> > +
>> >> > +    for (int p = 0; p < s->nb_components; p++) {
>> >> > +        cidx = s->is_rgb ? s->rgba_map[p] : p;
>> >> > +
>> >> > +        if (!(s->planes & (1 << p)))
>> >> > +            continue;
>> >> > +
>> >> > +        for (int j = 0; j < s->thread_count; j++) {
>> >> > +            mean[cidx] += s->mean[cidx][j];
>> >> > +            stdev[cidx] += s->stdev[cidx][j];
>> >> > +            if (s->min_value[cidx][j] < min_value[cidx])
>> >> > +                min_value[cidx] = s->min_value[cidx][j];
>> >> > +            if (s->max_value[cidx][j] > max_value[cidx])
>> >> > +                max_value[cidx] = s->max_value[cidx][j];
>> >> > +        }
>> >> > +        mean[cidx] = mean[cidx] / s->thread_count;
>> >> > +        stdev[cidx] = stdev[cidx] / s->thread_count;
>> >> > +
>> >> > +        set_meta_int(&in->metadata, "min", s->comps[p],
>> >> > min_value[cidx]);
>> >> > +        set_meta_int(&in->metadata, "max", s->comps[p],
>> >> > max_value[cidx]);
>> >> > +        set_meta_int(&in->metadata, "mean", s->comps[p],
>> >> > mean[cidx]);
>> >> > +        set_meta_int(&in->metadata, "stdev", s->comps[p],
>> >> > stdev[cidx]);
>> >> > +
>> >> > +        set_meta_float(&in->metadata, "pmin", s->comps[p],
>> >> > min_value[cidx]
>> >> > / s->max);
>> >> > +        set_meta_float(&in->metadata, "pmax", s->comps[p],
>> >> > max_value[cidx]
>> >> > / s->max);
>> >> > +        set_meta_float(&in->metadata, "pmean", s->comps[p],
>> >> > mean[cidx]
>> >> > /
>> >> > s->max);
>> >> > +        set_meta_float(&in->metadata, "pstdev", s->comps[p],
>> >> > stdev[cidx] /
>> >> > s->max);
>> >> > +    }
>> >> > +}
>> >> > +
>> >> > +static int activate(AVFilterContext *ctx)
>> >> > +{
>> >> > +    int ret;
>> >> > +    AVFilterLink *inlink = ctx->inputs[0];
>> >> > +    AVFilterLink *outlink = ctx->outputs[0];
>> >> > +    ColorStatsContext *s = ctx->priv;
>> >> > +    AVFrame *in;
>> >> > +    ThreadData td;
>> >> > +
>> >> > +    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
>> >> > +
>> >> > +    ret = ff_inlink_consume_frame(inlink, &in);
>> >> > +    if (ret < 0)
>> >> > +        return ret;
>> >> > +
>> >> > +    if (in) {
>> >> > +        td.in = in;
>> >> > +        ctx->internal->execute(ctx, s->stats_slice, &td, NULL,
>> >> > s->thread_count);
>> >> > +
>> >> > +        report_detect_result(ctx, in);
>> >> > +        return ff_filter_frame(outlink, in);
>> >> > +    }
>> >> > +
>> >> > +    FF_FILTER_FORWARD_STATUS(inlink, outlink);
>> >> > +    FF_FILTER_FORWARD_WANTED(outlink, inlink);
>> >> > +
>> >> > +    return FFERROR_NOT_READY;
>> >> > +}
>> >> > +
>> >> > +static const AVFilterPad inputs[] = {
>> >> > +    {
>> >> > +        .name         = "default",
>> >> > +        .type         = AVMEDIA_TYPE_VIDEO,
>> >> > +        .config_props = config_input,
>> >> > +    },
>> >> > +    { NULL }
>> >> > +};
>> >> > +
>> >> > +static const AVFilterPad outputs[] = {
>> >> > +    {
>> >> > +        .name          = "default",
>> >> > +        .type          = AVMEDIA_TYPE_VIDEO,
>> >> > +    },
>> >> > +    { NULL }
>> >> > +};
>> >> > +
>> >> > +#define DEFINE_COLOR_FILTER(name_, description_)
>> >> > \
>> >> > +    AVFilter ff_vf_##name_ = {
>> >> > \
>> >> > +        .name          = #name_,
>> >> > \
>> >> > +        .description   = NULL_IF_CONFIG_SMALL(description_),
>> >> > \
>> >> > +        .priv_size     = sizeof(ColorStatsContext),
>> >> > \
>> >> > +        .priv_class    = &name_ ## _class,
>> >> > \
>> >> > +        .init          = name_##_init,
>> >> > \
>> >> > +        .uninit        = uninit,
>> >> > \
>> >> > +        .query_formats = query_formats,
>> >> > \
>> >> > +        .inputs        = inputs,
>> >> > \
>> >> > +        .outputs       = outputs,
>> >> > \
>> >> > +        .activate      = activate,
>> >> > \
>> >> > +        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |
>> >> > \
>> >> > +                         AVFILTER_FLAG_SLICE_THREADS,
>> >> > \
>> >> > +    }
>> >> > +
>> >> > +#if CONFIG_COLORSTATS_FILTER
>> >> > +
>> >> > +#define colorstats_options options
>> >> > +AVFILTER_DEFINE_CLASS(colorstats);
>> >> > +
>> >> > +static int colorstats_init(AVFilterContext *ctx)
>> >> > +{
>> >> > +    return 0;
>> >> > +}
>> >> > +
>> >> > +DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
>> >> > +#endif
>> >> > +
>> >> > +#if CONFIG_COLORRGBSTATS_FILTER
>> >> > +
>> >> > +#define colorrgbstats_options options
>> >> > +AVFILTER_DEFINE_CLASS(colorrgbstats);
>> >> > +
>> >> > +static int colorrgbstats_init(AVFilterContext *ctx)
>> >> > +{
>> >> > +    ColorStatsContext *s = ctx->priv;
>> >> > +
>> >> > +    s->force_fmt = 1;
>> >> > +    return 0;
>> >> > +}
>> >> > +
>> >> > +DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
>> >> > +#endif
>> >> > +
>> >> > +#if CONFIG_COLORYUVSTATS_FILTER
>> >> > +
>> >> > +#define coloryuvstats_options options
>> >> > +AVFILTER_DEFINE_CLASS(coloryuvstats);
>> >> > +
>> >> > +static int coloryuvstats_init(AVFilterContext *ctx)
>> >> > +{
>> >> > +    ColorStatsContext *s = ctx->priv;
>> >> > +
>> >> > +    s->force_fmt = 2;
>> >> > +    return 0;
>> >> > +}
>> >> > +
>> >> > +DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
>> >> > +#endif
>> >> > --
>> >> > 2.21.0
>> >> >
>> >> > _______________________________________________
>> >> > ffmpeg-devel mailing list
>> >> > ffmpeg-devel@ffmpeg.org
>> >> > https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>> >> >
>> >> > To unsubscribe, visit link above, or email
>> >> > ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>> >
>> > --
>> > Thanks,
>> > Limin Wang
>> > _______________________________________________
>> > ffmpeg-devel mailing list
>> > ffmpeg-devel@ffmpeg.org
>> > https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>> >
>> > To unsubscribe, visit link above, or email
>> > ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>> _______________________________________________
>> ffmpeg-devel mailing list
>> ffmpeg-devel@ffmpeg.org
>> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>>
>> To unsubscribe, visit link above, or email
>> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>
> --
> Thanks,
> Limin Wang
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
Dave Rice Dec. 30, 2019, 9:36 p.m. UTC | #6
> On Dec 27, 2019, at 10:49 AM, Paul B Mahol <onemda@gmail.com> wrote:
> 
> That is because signalstats is doing more stuff.

signalstats includes options to disable some of the calculations, possibly this could be extended to enable or disable the ones you want. It would be interesting to merge these ideas rather than have two filters with such a substantial overlap.

> On 12/27/19, Limin Wang <lance.lmwang@gmail.com> wrote:
>> On Fri, Dec 27, 2019 at 03:20:19PM +0100, Paul B Mahol wrote:
>>> On 12/27/19, Limin Wang <lance.lmwang@gmail.com> wrote:
>>>> On Fri, Dec 27, 2019 at 12:35:25PM +0100, Paul B Mahol wrote:
>>>>> You are duplicating some functionality of signalstats filter.
>>>>> 
>>>> Yes, I have other function need to use the mean and stdev which is
>>>> support in showinfo filter(only 8bit and don't support packed format,
>>>> no multi-thread), and signalstats don't support rgb format and don't
>>>> have stdev, also it have too many other function and difficult to change
>>>> it, so I think it's more simple to create a new filter to do it.
>>>> 
>>> 
>>> No, unacceptable. use signalstats filter.
>> 
>> The performance is one major reason also, below is the profiling result for
>> performance:
>> 
>> ./ffmpeg -nostats -f lavfi -i testsrc2=4k:d=2 -vf
>> bench=start,signalstats,bench=stop -f null -
>> [bench @ 0x3fb9080] t:0.161589 avg:0.165756 max:0.169923 min:0.161589
>> [bench @ 0x3fb9080] t:0.160334 avg:0.163948 max:0.169923 min:0.160334
>> [bench @ 0x3fb9080] t:0.160345 avg:0.163047 max:0.169923 min:0.160334
>> [bench @ 0x3fb9080] t:0.160924 avg:0.162623 max:0.169923 min:0.160334
>> [bench @ 0x3fb9080] t:0.160318 avg:0.162238 max:0.169923 min:0.160318
>> 
>> ./ffmpeg -nostats -f lavfi -i testsrc2=4k:d=2 -vf
>> bench=start,colorstats,bench=stop -f null -
>> [bench @ 0x26f6100] t:0.012596 avg:0.012612 max:0.012628 min:0.012596
>> [bench @ 0x26f6100] t:0.012542 avg:0.012588 max:0.012628 min:0.012542
>> [bench @ 0x26f6100] t:0.012529 avg:0.012573 max:0.012628 min:0.012529
>> [bench @ 0x26f6100] t:0.012532 avg:0.012565 max:0.012628 min:0.012529
>> [bench @ 0x26f6100] t:0.012527 avg:0.012559 max:0.012628 min:0.012527
>> [bench @ 0x26f6100] t:0.012525 avg:0.012554 max:0.012628 min:0.012525
>> [bench @ 0x26f6100] t:0.012522 avg:0.012550 max:0.012628 min:0.012522
>> [bench @ 0x26f6100] t:0.012552 avg:0.012550 max:0.012628 min:0.012522
>> 
>> 
>>> 
>>>> 
>>>>> On 12/27/19, lance.lmwang@gmail.com <lance.lmwang@gmail.com> wrote:
>>>>>> From: Limin Wang <lance.lmwang@gmail.com>
>>>>>> 
>>>>>> Signed-off-by: Limin Wang <lance.lmwang@gmail.com>
>>>>>> ---
>>>>>> doc/filters.texi            |  74 ++++++
>>>>>> libavfilter/Makefile        |   1 +
>>>>>> libavfilter/allfilters.c    |   3 +
>>>>>> libavfilter/vf_colorstats.c | 461
>>>>>> ++++++++++++++++++++++++++++++++++++
>>>>>> 4 files changed, 539 insertions(+)
>>>>>> create mode 100644 libavfilter/vf_colorstats.c
>>>>>> 
>>>>>> diff --git a/doc/filters.texi b/doc/filters.texi
>>>>>> index 8c5d3a5760..81968b2c17 100644
>>>>>> --- a/doc/filters.texi
>>>>>> +++ b/doc/filters.texi
>>>>>> @@ -7695,6 +7695,80 @@ For example to convert the input to
>>>>>> SMPTE-240M,
>>>>>> use
>>>>>> the command:
>>>>>> colorspace=smpte240m
>>>>>> @end example
>>>>>> 
>>>>>> +@section colorstats, colorrgbstats, coloryuvstats
>>>>>> +The filter provides statistical video measurements such as mean,
>>>>>> minimum,
>>>>>> maximum and
>>>>>> +standard deviation for each frame. The user can check for
>>>>>> unexpected/accidental errors
>>>>>> +very quickly with them.
>>>>>> +
>>>>>> +@var{colorrgbstats} reports the color stats for RGB input video,
>>>>>> @var{coloryuvstats}
>>>>>> +for a YUV input video.
>>>>>> +
>>>>>> +These filters accept the following parameters:
>>>>>> +@table @option
>>>>>> +@item planes
>>>>>> +Set which planes to filter. Default is only the first plane.
>>>>>> +@end table
>>>>>> +
>>>>>> +By default the filter will report these metadata values if the
>>>>>> planes
>>>>>> +are processed:
>>>>>> +
>>>>>> +@table @option
>>>>>> +@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
>>>>>> +Display the minimal Y/U/V/R/G/B/A plane value contained within the
>>>>>> input
>>>>>> frame.
>>>>>> +Expressed in range of [0, 1<<bitdepth-1]
>>>>>> +
>>>>>> +@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, pmin.a
>>>>>> +Display the minimal Y/U/V/R/G/B/A plane percentage of maximum
>>>>>> contained
>>>>>> within
>>>>>> +the input frame. Expressed in range of [0, 1]
>>>>>> +
>>>>>> +@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
>>>>>> +Display the maximum Y/U/V/R/G/B/A plane value contained within the
>>>>>> input
>>>>>> frame.
>>>>>> +Expressed in range of [0, 1<<bitdepth-1]
>>>>>> +
>>>>>> +@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
>>>>>> +Display the maximum Y/U/V/R/G/B/A plane percentage of maximum
>>>>>> contained
>>>>>> within
>>>>>> +the input frame. Expressed in range of [0, 1]
>>>>>> +
>>>>>> +@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
>>>>>> +Display the Y/U/V/R/G/B/A plane mean value contained within the
>>>>>> input
>>>>>> frame.
>>>>>> +Expressed in range of [0, 1<<bitdepth-1]
>>>>>> +
>>>>>> +@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
>>>>>> +Display the Y/U/V/R/G/B/A plane mean value percentage of maximum
>>>>>> contained
>>>>>> within
>>>>>> +the input frame. Expressed in range of [0, 1]
>>>>>> +
>>>>>> +@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
>>>>>> +Display the Y/U/V/R/G/B/A plane standard deviation value contained
>>>>>> within
>>>>>> the
>>>>>> +input frame. Expressed in range of [0, 1<<bitdepth-1]
>>>>>> +
>>>>>> +@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b,
>>>>>> pstdev.a
>>>>>> +Display the Y/U/V/R/G/B/A plane standard deviation value percentage
>>>>>> of
>>>>>> maximum contained
>>>>>> +within the input frame. Expressed in range of [0, 1]
>>>>>> +@end table
>>>>>> +
>>>>>> +@subsection Examples
>>>>>> +
>>>>>> +@itemize
>>>>>> +@item
>>>>>> +Show all YUV color stats for each frame:
>>>>>> +@example
>>>>>> +ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf
>>>>>> -show_frames
>>>>>> +@end example
>>>>>> +
>>>>>> +@item
>>>>>> +Draw graph for the pmean and pstdev value of the Y plane per frame:
>>>>>> +@example
>>>>>> +ffplay -i example.mov -vf
>>>>>> coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
>>>>>> +@end example
>>>>>> +
>>>>>> +@item
>>>>>> +Print all RGB color stats for each frame:
>>>>>> +@example
>>>>>> +ffplay -i example.mov -vf
>>>>>> colorrgbstats=planes=0xf,metadata=mode=print
>>>>>> +@end example
>>>>>> +
>>>>>> +@end itemize
>>>>>> +
>>>>>> @section convolution
>>>>>> 
>>>>>> Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49
>>>>>> elements.
>>>>>> diff --git a/libavfilter/Makefile b/libavfilter/Makefile
>>>>>> index 37d4eee858..a007bd32d1 100644
>>>>>> --- a/libavfilter/Makefile
>>>>>> +++ b/libavfilter/Makefile
>>>>>> @@ -182,6 +182,7 @@ OBJS-$(CONFIG_CIESCOPE_FILTER)               +=
>>>>>> vf_ciescope.o
>>>>>> OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
>>>>>> OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
>>>>>> OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      +=
>>>>>> vf_colorchannelmixer.o
>>>>>> +OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
>>>>>> OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
>>>>>> OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o
>>>>>> opencl.o \
>>>>>>                                                 opencl/colorkey.o
>>>>>> diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
>>>>>> index c295f8e403..6b84a45452 100644
>>>>>> --- a/libavfilter/allfilters.c
>>>>>> +++ b/libavfilter/allfilters.c
>>>>>> @@ -172,6 +172,9 @@ extern AVFilter ff_vf_ciescope;
>>>>>> extern AVFilter ff_vf_codecview;
>>>>>> extern AVFilter ff_vf_colorbalance;
>>>>>> extern AVFilter ff_vf_colorchannelmixer;
>>>>>> +extern AVFilter ff_vf_colorstats;
>>>>>> +extern AVFilter ff_vf_colorrgbstats;
>>>>>> +extern AVFilter ff_vf_coloryuvstats;
>>>>>> extern AVFilter ff_vf_colorkey;
>>>>>> extern AVFilter ff_vf_colorkey_opencl;
>>>>>> extern AVFilter ff_vf_colorhold;
>>>>>> diff --git a/libavfilter/vf_colorstats.c
>>>>>> b/libavfilter/vf_colorstats.c
>>>>>> new file mode 100644
>>>>>> index 0000000000..7e94c572f9
>>>>>> --- /dev/null
>>>>>> +++ b/libavfilter/vf_colorstats.c
>>>>>> @@ -0,0 +1,461 @@
>>>>>> +/*
>>>>>> + * This file is part of FFmpeg.
>>>>>> + *
>>>>>> + * FFmpeg is free software; you can redistribute it and/or
>>>>>> + * modify it under the terms of the GNU Lesser General Public
>>>>>> + * License as published by the Free Software Foundation; either
>>>>>> + * version 2.1 of the License, or (at your option) any later
>>>>>> version.
>>>>>> + *
>>>>>> + * FFmpeg is distributed in the hope that it will be useful,
>>>>>> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
>>>>>> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
>>>>>> + * Lesser General Public License for more details.
>>>>>> + *
>>>>>> + * You should have received a copy of the GNU Lesser General Public
>>>>>> + * License along with FFmpeg; if not, write to the Free Software
>>>>>> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
>>>>>> 02110-1301
>>>>>> USA
>>>>>> + */
>>>>>> +
>>>>>> +#include "libavutil/imgutils.h"
>>>>>> +#include "libavutil/opt.h"
>>>>>> +#include "libavutil/pixdesc.h"
>>>>>> +
>>>>>> +#include "avfilter.h"
>>>>>> +#include "drawutils.h"
>>>>>> +#include "filters.h"
>>>>>> +
>>>>>> +#define R 0
>>>>>> +#define G 1
>>>>>> +#define B 2
>>>>>> +#define A 3
>>>>>> +
>>>>>> +typedef struct ThreadData {
>>>>>> +    AVFrame *in;
>>>>>> +} ThreadData;
>>>>>> +
>>>>>> +typedef struct ColorStatsContext {
>>>>>> +    const AVClass *class;
>>>>>> +
>>>>>> +    ptrdiff_t width[4];
>>>>>> +    ptrdiff_t height[4];
>>>>>> +
>>>>>> +    int planes;
>>>>>> +    int step;
>>>>>> +    int bitdepth;
>>>>>> +    int nb_components;
>>>>>> +    int thread_count;
>>>>>> +    int is_16bit;
>>>>>> +    int is_rgb;
>>>>>> +    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
>>>>>> +    uint8_t rgba_map[4];
>>>>>> +    char comps[4];
>>>>>> +
>>>>>> +    double *mean[4];
>>>>>> +    double *stdev[4];
>>>>>> +
>>>>>> +    int max;
>>>>>> +    double *min_value[4];
>>>>>> +    double *max_value[4];
>>>>>> +
>>>>>> +    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr,
>>>>>> int
>>>>>> nb_jobs);
>>>>>> +} ColorStatsContext;
>>>>>> +
>>>>>> +#define OFFSET(x) offsetof(ColorStatsContext, x)
>>>>>> +#define V AV_OPT_FLAG_VIDEO_PARAM
>>>>>> +#define F AV_OPT_FLAG_FILTERING_PARAM
>>>>>> +
>>>>>> +static const AVOption options[] = {
>>>>>> +    { "planes", "set planes to filter", OFFSET(planes),
>>>>>> AV_OPT_TYPE_INT,
>>>>>> {.i64=1}, 1, 0xf, V|F},
>>>>>> +    {NULL}
>>>>>> +};
>>>>>> +
>>>>>> +#define YUV_FORMATS
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV420P9,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,
>>>>>> \
>>>>>> +    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,
>>>>>> \
>>>>>> +    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,
>>>>>> \
>>>>>> +    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
>>>>>> +
>>>>>> +#define RGB_FORMATS
>>>>>> \
>>>>>> +    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,
>>>>>> \
>>>>>> +    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,
>>>>>> \
>>>>>> +    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,
>>>>>> \
>>>>>> +    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,
>>>>>> \
>>>>>> +    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,
>>>>>> \
>>>>>> +    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,
>>>>>> \
>>>>>> +    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,
>>>>>> \
>>>>>> +    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
>>>>>> +
>>>>>> +static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS,
>>>>>> AV_PIX_FMT_NONE };
>>>>>> +static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS,
>>>>>> AV_PIX_FMT_NONE };
>>>>>> +static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS,
>>>>>> YUV_FORMATS, AV_PIX_FMT_NONE };
>>>>>> +
>>>>>> +static int query_formats(AVFilterContext *ctx)
>>>>>> +{
>>>>>> +    const ColorStatsContext *s = ctx->priv;
>>>>>> +    const enum AVPixelFormat *pix_fmts = s->force_fmt == 1 ?
>>>>>> rgb_pix_fmts :
>>>>>> +                                         s->force_fmt == 2 ?
>>>>>> yuv_pix_fmts :
>>>>>> +                                         all_pix_fmts;
>>>>>> +
>>>>>> +    AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
>>>>>> +    if (!fmts_list)
>>>>>> +        return AVERROR(ENOMEM);
>>>>>> +    return ff_set_common_formats(ctx, fmts_list);
>>>>>> +}
>>>>>> +
>>>>>> +#define DECLARE_STATS_PLANAR_FUNC(nbits, div)
>>>>>>                      \
>>>>>> +static int stats_slice_planar_##nbits(AVFilterContext *ctx, void
>>>>>> *arg,
>>>>>> int
>>>>>> jobnr, int nb_jobs)    \
>>>>>> +{
>>>>>>                      \
>>>>>> +    const ColorStatsContext *s = ctx->priv;
>>>>>>                      \
>>>>>> +    ThreadData *td = arg;
>>>>>>                      \
>>>>>> +    const AVFrame *in = td->in;
>>>>>>                      \
>>>>>> +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>>>>>>                      \
>>>>>> +    int32_t count[4] = { 0 };
>>>>>>                      \
>>>>>> +    double min_value[4] = { s->max };
>>>>>>                      \
>>>>>> +    double max_value[4] = { 0 };
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +    for (int i = 0; i < s->nb_components; i++) {
>>>>>>                      \
>>>>>> +        const int width = s->width[i];
>>>>>>                      \
>>>>>> +        const int height = s->height[i];
>>>>>>                      \
>>>>>> +        const int slice_start = (height *  jobnr     ) / nb_jobs;
>>>>>>                      \
>>>>>> +        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>>>>>>                      \
>>>>>> +        int linesize = in->linesize[i] / div;
>>>>>>                      \
>>>>>> +        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] +
>>>>>> slice_start
>>>>>> * linesize;            \
>>>>>> +
>>>>>>                      \
>>>>>> +        if (!(s->planes & (1 << i)))
>>>>>>                      \
>>>>>> +            continue;
>>>>>>                      \
>>>>>> +        for (int j = slice_start; j < slice_end; j++) {
>>>>>>                      \
>>>>>> +            for (int x = 0; x < width; x++) {
>>>>>>                      \
>>>>>> +                sum[i] += src[x];
>>>>>>                      \
>>>>>> +                sum2[i] += src[x] * src[x];
>>>>>>                      \
>>>>>> +                if (src[i] > max_value[i]) max_value[i] = src[i];
>>>>>>                      \
>>>>>> +                if (src[i] < min_value[i]) min_value[i] = src[i];
>>>>>>                      \
>>>>>> +            }
>>>>>>                      \
>>>>>> +            count[i] += width;
>>>>>>                      \
>>>>>> +            src += linesize;
>>>>>>                      \
>>>>>> +        }
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) /
>>>>>> count[i];
>>>>>>                      \
>>>>>> +        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i]
>>>>>> /
>>>>>> count[i]) / count[i]);     \
>>>>>> +        s->min_value[i][jobnr] = min_value[i];
>>>>>>                      \
>>>>>> +        s->max_value[i][jobnr] = max_value[i];
>>>>>>                      \
>>>>>> +    }
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +    return 0;
>>>>>>                      \
>>>>>> +}
>>>>>> +DECLARE_STATS_PLANAR_FUNC(8, 1)
>>>>>> +DECLARE_STATS_PLANAR_FUNC(16, 2)
>>>>>> +
>>>>>> +#define DECLARE_STATS_PACKED_FUNC(nbits, div)
>>>>>>                      \
>>>>>> +static int stats_slice_packed_##nbits(AVFilterContext *ctx, void
>>>>>> *arg,
>>>>>> int
>>>>>> jobnr, int nb_jobs)    \
>>>>>> +{
>>>>>>                      \
>>>>>> +    const ColorStatsContext *s = ctx->priv;
>>>>>>                      \
>>>>>> +    ThreadData *td = arg;
>>>>>>                      \
>>>>>> +    const AVFrame *in = td->in;
>>>>>>                      \
>>>>>> +    int64_t sum[4] = { 0 }, sum2[4] = { 0 };
>>>>>>                      \
>>>>>> +    double min_value[4] = { s->max };
>>>>>>                      \
>>>>>> +    double max_value[4] = { 0 };
>>>>>>                      \
>>>>>> +    int32_t count[4] = { 0 };
>>>>>>                      \
>>>>>> +    const int width = in->width;
>>>>>>                      \
>>>>>> +    const int height = in->height;
>>>>>>                      \
>>>>>> +    const int slice_start = (height *  jobnr     ) / nb_jobs;
>>>>>>                      \
>>>>>> +    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;
>>>>>>                      \
>>>>>> +    int linesize = in->linesize[0] / div;
>>>>>>                      \
>>>>>> +    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] +
>>>>>> slice_start
>>>>>> *
>>>>>> linesize;                \
>>>>>> +    const uint8_t ro = s->rgba_map[R];
>>>>>>                      \
>>>>>> +    const uint8_t go = s->rgba_map[G];
>>>>>>                      \
>>>>>> +    const uint8_t bo = s->rgba_map[B];
>>>>>>                      \
>>>>>> +    const uint8_t ao = s->rgba_map[A];
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +    for (int y = slice_start; y < slice_end; y++) {
>>>>>>                      \
>>>>>> +        for (int x = 0; x < width * s->step; x += s->step) {
>>>>>>                      \
>>>>>> +            const int r = src[x + ro];
>>>>>>                      \
>>>>>> +            const int g = src[x + go];
>>>>>>                      \
>>>>>> +            const int b = src[x + bo];
>>>>>>                      \
>>>>>> +            const int a = src[x + ao];
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +            sum[ro] += r;
>>>>>>                      \
>>>>>> +            sum[go] += g;
>>>>>>                      \
>>>>>> +            sum[bo] += b;
>>>>>>                      \
>>>>>> +            sum2[ro] += r * r;
>>>>>>                      \
>>>>>> +            sum2[go] += g * g;
>>>>>>                      \
>>>>>> +            sum2[bo] += b * b;
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +            if (r > max_value[ro]) max_value[ro] = r;
>>>>>>                      \
>>>>>> +            if (r < min_value[ro]) min_value[ro] = r;
>>>>>>                      \
>>>>>> +            if (g > max_value[go]) max_value[go] = g;
>>>>>>                      \
>>>>>> +            if (g < min_value[go]) min_value[go] = g;
>>>>>>                      \
>>>>>> +            if (b > max_value[bo]) max_value[bo] = b;
>>>>>>                      \
>>>>>> +            if (b < min_value[bo]) min_value[bo] = b;
>>>>>>                      \
>>>>>> +            if (s->step == 4) {
>>>>>>                      \
>>>>>> +                sum2[ao] += a * a;
>>>>>>                      \
>>>>>> +                sum[ao] += a;
>>>>>>                      \
>>>>>> +                if (a > max_value[ao]) max_value[ao] = a;
>>>>>>                      \
>>>>>> +                if (a < min_value[ao]) min_value[ao] = a;
>>>>>>                      \
>>>>>> +            }
>>>>>>                      \
>>>>>> +        }
>>>>>>                      \
>>>>>> +        count[ro] += width;
>>>>>>                      \
>>>>>> +        count[go] += width;
>>>>>>                      \
>>>>>> +        count[bo] += width;
>>>>>>                      \
>>>>>> +        if (s->step == 4)
>>>>>>                      \
>>>>>> +            count[ao] += width;
>>>>>>                      \
>>>>>> +        src += linesize;
>>>>>>                      \
>>>>>> +    }
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +    for (int p = 0; p < s->nb_components; p++) {
>>>>>>                      \
>>>>>> +        int ci = s->is_rgb ? s->rgba_map[p] : p;
>>>>>>                      \
>>>>>> +        double variance;
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) /
>>>>>> count[ci];
>>>>>>                      \
>>>>>> +        variance = (sum2[ci] - sum[ci] * (double)sum[ci] /
>>>>>> count[ci]) /
>>>>>> count[ci];                \
>>>>>> +        s->stdev[ci][jobnr] = sqrt(variance);
>>>>>>                      \
>>>>>> +        s->min_value[ci][jobnr] = min_value[ci];
>>>>>>                      \
>>>>>> +        s->max_value[ci][jobnr] = max_value[ci];
>>>>>>                      \
>>>>>> +    }
>>>>>>                      \
>>>>>> +
>>>>>>                      \
>>>>>> +    return 0;
>>>>>>                      \
>>>>>> +}
>>>>>> +DECLARE_STATS_PACKED_FUNC(8, 1)
>>>>>> +DECLARE_STATS_PACKED_FUNC(16, 2)
>>>>>> +
>>>>>> +static av_cold void uninit(AVFilterContext *ctx)
>>>>>> +{
>>>>>> +    ColorStatsContext *s = ctx->priv;
>>>>>> +
>>>>>> +    for (int i = 0; i < s->nb_components; i++) {
>>>>>> +        av_freep(&s->mean[i]);
>>>>>> +        av_freep(&s->stdev[i]);
>>>>>> +        av_freep(&s->min_value[i]);
>>>>>> +        av_freep(&s->max_value[i]);
>>>>>> +    }
>>>>>> +}
>>>>>> +
>>>>>> +static int config_input(AVFilterLink *inlink)
>>>>>> +{
>>>>>> +    AVFilterContext *ctx = inlink->dst;
>>>>>> +    ColorStatsContext *s = ctx->priv;
>>>>>> +    const AVPixFmtDescriptor *desc =
>>>>>> av_pix_fmt_desc_get(inlink->format);
>>>>>> +
>>>>>> +    s->nb_components = desc->nb_components;
>>>>>> +    s->bitdepth = desc->comp[0].depth;
>>>>>> +    s->is_16bit = s->bitdepth > 8;
>>>>>> +    s->step = av_get_padded_bits_per_pixel(desc) >> (3 +
>>>>>> s->is_16bit);
>>>>>> +    s->max  = 1 << s->bitdepth - 1;
>>>>>> +
>>>>>> +    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
>>>>>> +    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
>>>>>> +    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
>>>>>> +    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
>>>>>> +    s->comps[3] = 'a';
>>>>>> +
>>>>>> +    s->thread_count = FFMAX(1, FFMIN(inlink->h,
>>>>>> ff_filter_get_nb_threads(ctx)));
>>>>>> +    for (int i = 0; i < s->nb_components; i++) {
>>>>>> +        ptrdiff_t line_size = av_image_get_linesize(inlink->format,
>>>>>> inlink->w, i);
>>>>>> +
>>>>>> +        s->width[i] = line_size >> (s->bitdepth > 8);
>>>>>> +        s->height[i] = inlink->h >> ((i == 1 || i == 2) ?
>>>>>> desc->log2_chroma_h : 0);
>>>>>> +
>>>>>> +        s->mean[i] = av_mallocz_array(s->thread_count,
>>>>>> sizeof(*s->mean[i]));
>>>>>> +        s->stdev[i] = av_mallocz_array(s->thread_count,
>>>>>> sizeof(*s->stdev[i]));
>>>>>> +        s->max_value[i] = av_mallocz_array(s->thread_count,
>>>>>> sizeof(*s->max_value[i]));
>>>>>> +        s->min_value[i] = av_mallocz_array(s->thread_count,
>>>>>> sizeof(*s->min_value[i]));
>>>>>> +        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] ||
>>>>>> !s->min_value[i])
>>>>>> +            return AVERROR(ENOMEM);
>>>>>> +        for (int j = 0; j < s->thread_count; j++) {
>>>>>> +            s->min_value[i][j] = (1 << s->bitdepth);
>>>>>> +            s->max_value[i][j] = 0;
>>>>>> +        }
>>>>>> +    }
>>>>>> +
>>>>>> +    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
>>>>>> +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 :
>>>>>> stats_slice_planar_16;
>>>>>> +    else
>>>>>> +        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 :
>>>>>> stats_slice_packed_16;
>>>>>> +
>>>>>> +    return 0;
>>>>>> +}
>>>>>> +
>>>>>> +static void set_meta_float(AVDictionary **metadata, const char *key,
>>>>>> char
>>>>>> c, float d)
>>>>>> +{
>>>>>> +    char value[128];
>>>>>> +    char key2[128];
>>>>>> +
>>>>>> +    snprintf(value, sizeof(value), "%.2f", d);
>>>>>> +    if (c)
>>>>>> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key,
>>>>>> c);
>>>>>> +    else
>>>>>> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
>>>>>> +    av_dict_set(metadata, key2, value, 0);
>>>>>> +}
>>>>>> +
>>>>>> +static void set_meta_int(AVDictionary **metadata, const char *key,
>>>>>> char
>>>>>> c,
>>>>>> int d)
>>>>>> +{
>>>>>> +    char value[128];
>>>>>> +    char key2[128];
>>>>>> +
>>>>>> +    snprintf(value, sizeof(value), "%d", d);
>>>>>> +    if (c)
>>>>>> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s.%c", key,
>>>>>> c);
>>>>>> +    else
>>>>>> +        snprintf(key2, sizeof(key2), "lavf.colorstats.%s", key);
>>>>>> +    av_dict_set(metadata, key2, value, 0);
>>>>>> +}
>>>>>> +
>>>>>> +static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
>>>>>> +{
>>>>>> +    const ColorStatsContext *s = ctx->priv;
>>>>>> +    double mean[4] = { 0 };
>>>>>> +    double stdev[4] = { 0 };
>>>>>> +    double min_value[4] = { s->max };
>>>>>> +    double max_value[4] = { 0 };
>>>>>> +    int cidx;
>>>>>> +
>>>>>> +    for (int p = 0; p < s->nb_components; p++) {
>>>>>> +        cidx = s->is_rgb ? s->rgba_map[p] : p;
>>>>>> +
>>>>>> +        if (!(s->planes & (1 << p)))
>>>>>> +            continue;
>>>>>> +
>>>>>> +        for (int j = 0; j < s->thread_count; j++) {
>>>>>> +            mean[cidx] += s->mean[cidx][j];
>>>>>> +            stdev[cidx] += s->stdev[cidx][j];
>>>>>> +            if (s->min_value[cidx][j] < min_value[cidx])
>>>>>> +                min_value[cidx] = s->min_value[cidx][j];
>>>>>> +            if (s->max_value[cidx][j] > max_value[cidx])
>>>>>> +                max_value[cidx] = s->max_value[cidx][j];
>>>>>> +        }
>>>>>> +        mean[cidx] = mean[cidx] / s->thread_count;
>>>>>> +        stdev[cidx] = stdev[cidx] / s->thread_count;
>>>>>> +
>>>>>> +        set_meta_int(&in->metadata, "min", s->comps[p],
>>>>>> min_value[cidx]);
>>>>>> +        set_meta_int(&in->metadata, "max", s->comps[p],
>>>>>> max_value[cidx]);
>>>>>> +        set_meta_int(&in->metadata, "mean", s->comps[p],
>>>>>> mean[cidx]);
>>>>>> +        set_meta_int(&in->metadata, "stdev", s->comps[p],
>>>>>> stdev[cidx]);
>>>>>> +
>>>>>> +        set_meta_float(&in->metadata, "pmin", s->comps[p],
>>>>>> min_value[cidx]
>>>>>> / s->max);
>>>>>> +        set_meta_float(&in->metadata, "pmax", s->comps[p],
>>>>>> max_value[cidx]
>>>>>> / s->max);
>>>>>> +        set_meta_float(&in->metadata, "pmean", s->comps[p],
>>>>>> mean[cidx]
>>>>>> /
>>>>>> s->max);
>>>>>> +        set_meta_float(&in->metadata, "pstdev", s->comps[p],
>>>>>> stdev[cidx] /
>>>>>> s->max);
>>>>>> +    }
>>>>>> +}
>>>>>> +
>>>>>> +static int activate(AVFilterContext *ctx)
>>>>>> +{
>>>>>> +    int ret;
>>>>>> +    AVFilterLink *inlink = ctx->inputs[0];
>>>>>> +    AVFilterLink *outlink = ctx->outputs[0];
>>>>>> +    ColorStatsContext *s = ctx->priv;
>>>>>> +    AVFrame *in;
>>>>>> +    ThreadData td;
>>>>>> +
>>>>>> +    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
>>>>>> +
>>>>>> +    ret = ff_inlink_consume_frame(inlink, &in);
>>>>>> +    if (ret < 0)
>>>>>> +        return ret;
>>>>>> +
>>>>>> +    if (in) {
>>>>>> +        td.in = in;
>>>>>> +        ctx->internal->execute(ctx, s->stats_slice, &td, NULL,
>>>>>> s->thread_count);
>>>>>> +
>>>>>> +        report_detect_result(ctx, in);
>>>>>> +        return ff_filter_frame(outlink, in);
>>>>>> +    }
>>>>>> +
>>>>>> +    FF_FILTER_FORWARD_STATUS(inlink, outlink);
>>>>>> +    FF_FILTER_FORWARD_WANTED(outlink, inlink);
>>>>>> +
>>>>>> +    return FFERROR_NOT_READY;
>>>>>> +}
>>>>>> +
>>>>>> +static const AVFilterPad inputs[] = {
>>>>>> +    {
>>>>>> +        .name         = "default",
>>>>>> +        .type         = AVMEDIA_TYPE_VIDEO,
>>>>>> +        .config_props = config_input,
>>>>>> +    },
>>>>>> +    { NULL }
>>>>>> +};
>>>>>> +
>>>>>> +static const AVFilterPad outputs[] = {
>>>>>> +    {
>>>>>> +        .name          = "default",
>>>>>> +        .type          = AVMEDIA_TYPE_VIDEO,
>>>>>> +    },
>>>>>> +    { NULL }
>>>>>> +};
>>>>>> +
>>>>>> +#define DEFINE_COLOR_FILTER(name_, description_)
>>>>>> \
>>>>>> +    AVFilter ff_vf_##name_ = {
>>>>>> \
>>>>>> +        .name          = #name_,
>>>>>> \
>>>>>> +        .description   = NULL_IF_CONFIG_SMALL(description_),
>>>>>> \
>>>>>> +        .priv_size     = sizeof(ColorStatsContext),
>>>>>> \
>>>>>> +        .priv_class    = &name_ ## _class,
>>>>>> \
>>>>>> +        .init          = name_##_init,
>>>>>> \
>>>>>> +        .uninit        = uninit,
>>>>>> \
>>>>>> +        .query_formats = query_formats,
>>>>>> \
>>>>>> +        .inputs        = inputs,
>>>>>> \
>>>>>> +        .outputs       = outputs,
>>>>>> \
>>>>>> +        .activate      = activate,
>>>>>> \
>>>>>> +        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |
>>>>>> \
>>>>>> +                         AVFILTER_FLAG_SLICE_THREADS,
>>>>>> \
>>>>>> +    }
>>>>>> +
>>>>>> +#if CONFIG_COLORSTATS_FILTER
>>>>>> +
>>>>>> +#define colorstats_options options
>>>>>> +AVFILTER_DEFINE_CLASS(colorstats);
>>>>>> +
>>>>>> +static int colorstats_init(AVFilterContext *ctx)
>>>>>> +{
>>>>>> +    return 0;
>>>>>> +}
>>>>>> +
>>>>>> +DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
>>>>>> +#endif
>>>>>> +
>>>>>> +#if CONFIG_COLORRGBSTATS_FILTER
>>>>>> +
>>>>>> +#define colorrgbstats_options options
>>>>>> +AVFILTER_DEFINE_CLASS(colorrgbstats);
>>>>>> +
>>>>>> +static int colorrgbstats_init(AVFilterContext *ctx)
>>>>>> +{
>>>>>> +    ColorStatsContext *s = ctx->priv;
>>>>>> +
>>>>>> +    s->force_fmt = 1;
>>>>>> +    return 0;
>>>>>> +}
>>>>>> +
>>>>>> +DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
>>>>>> +#endif
>>>>>> +
>>>>>> +#if CONFIG_COLORYUVSTATS_FILTER
>>>>>> +
>>>>>> +#define coloryuvstats_options options
>>>>>> +AVFILTER_DEFINE_CLASS(coloryuvstats);
>>>>>> +
>>>>>> +static int coloryuvstats_init(AVFilterContext *ctx)
>>>>>> +{
>>>>>> +    ColorStatsContext *s = ctx->priv;
>>>>>> +
>>>>>> +    s->force_fmt = 2;
>>>>>> +    return 0;
>>>>>> +}
>>>>>> +
>>>>>> +DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
>>>>>> +#endif
>>>>>> --
>>>>>> 2.21.0
>>>>>> 
>>>>>> _______________________________________________
>>>>>> ffmpeg-devel mailing list
>>>>>> ffmpeg-devel@ffmpeg.org
>>>>>> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>>>>>> 
>>>>>> To unsubscribe, visit link above, or email
>>>>>> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>>>> 
>>>> --
>>>> Thanks,
>>>> Limin Wang
>>>> _______________________________________________
>>>> ffmpeg-devel mailing list
>>>> ffmpeg-devel@ffmpeg.org
>>>> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>>>> 
>>>> To unsubscribe, visit link above, or email
>>>> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>>> _______________________________________________
>>> ffmpeg-devel mailing list
>>> ffmpeg-devel@ffmpeg.org
>>> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>>> 
>>> To unsubscribe, visit link above, or email
>>> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
>> 
>> --
>> Thanks,
>> Limin Wang
>> _______________________________________________
>> ffmpeg-devel mailing list
>> ffmpeg-devel@ffmpeg.org
>> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
>> 
>> To unsubscribe, visit link above, or email
>> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
> _______________________________________________
> ffmpeg-devel mailing list
> ffmpeg-devel@ffmpeg.org
> https://ffmpeg.org/mailman/listinfo/ffmpeg-devel
> 
> To unsubscribe, visit link above, or email
> ffmpeg-devel-request@ffmpeg.org with subject "unsubscribe".
diff mbox

Patch

diff --git a/doc/filters.texi b/doc/filters.texi
index 8c5d3a5760..81968b2c17 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -7695,6 +7695,80 @@  For example to convert the input to SMPTE-240M, use the command:
 colorspace=smpte240m
 @end example
 
+@section colorstats, colorrgbstats, coloryuvstats
+These filters provide statistical video measurements such as mean, minimum, maximum and
+standard deviation for each frame, so the user can quickly check for
+unexpected/accidental errors.
+
+@var{colorrgbstats} reports the color stats for an RGB input video, @var{coloryuvstats}
+for a YUV input video.
+
+These filters accept the following parameters:
+@table @option
+@item planes
+Set which planes to filter. Default is only the first plane.
+@end table
+
+By default the filter will report these metadata values if the planes
+are processed:
+
+@table @option
+@item min.y, min.u, min.v, min.r, min.g, min.b, min.a
+Display the minimum Y/U/V/R/G/B/A plane value contained within the input frame.
+Expressed in the range [0, (1<<bitdepth)-1]
+
+@item pmin.y, pmin.u, pmin.v, pmin.r, pmin.g, pmin.b, pmin.a
+Display the minimum Y/U/V/R/G/B/A plane value as a percentage of the maximum, contained
+within the input frame. Expressed in the range [0, 1]
+
+@item max.y, max.u, max.v, max.r, max.g, max.b, max.a
+Display the maximum Y/U/V/R/G/B/A plane value contained within the input frame.
+Expressed in the range [0, (1<<bitdepth)-1]
+
+@item pmax.y, pmax.u, pmax.v, pmax.r, pmax.g, pmax.b, pmax.a
+Display the maximum Y/U/V/R/G/B/A plane percentage of maximum contained within
+the input frame. Expressed in range of [0, 1]
+
+@item mean.y, mean.u, mean.v, mean.r, mean.g, mean.b, mean.a
+Display the Y/U/V/R/G/B/A plane mean value contained within the input frame.
+Expressed in the range [0, (1<<bitdepth)-1]
+
+@item pmean.y, pmean.u, pmean.v, pmean.r, pmean.g, pmean.b, pmean.a
+Display the Y/U/V/R/G/B/A plane mean value percentage of maximum contained within
+the input frame. Expressed in range of [0, 1]
+
+@item stdev.y, stdev.u, stdev.v, stdev.r, stdev.g, stdev.b, stdev.a
+Display the Y/U/V/R/G/B/A plane standard deviation value contained within the
+input frame. Expressed in the range [0, (1<<bitdepth)-1]
+
+@item pstdev.y, pstdev.u, pstdev.v, pstdev.r, pstdev.g, pstdev.b, pstdev.a
+Display the Y/U/V/R/G/B/A plane standard deviation value percentage of maximum contained
+within the input frame. Expressed in range of [0, 1]
+@end table
+
+@subsection Examples
+
+@itemize
+@item
+Show all YUV color stats for each frame:
+@example
+ffprobe -f lavfi movie=example.mov,coloryuvstats=planes=0xf -show_frames
+@end example
+
+@item
+Draw graph for the pmean and pstdev value of the Y plane per frame:
+@example
+ffplay -i example.mov -vf coloryuvstats,drawgraph=m1=lavf.colorstats.pmean.y:m2=lavf.colorstats.pstdev.y:min=0:max=1
+@end example
+
+@item
+Print all RGB color stats for each frame:
+@example
+ffplay -i example.mov -vf colorrgbstats=planes=0xf,metadata=mode=print
+@end example
+
+@end itemize
+
 @section convolution
 
 Apply convolution of 3x3, 5x5, 7x7 or horizontal/vertical up to 49 elements.
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 37d4eee858..a007bd32d1 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -182,6 +182,7 @@  OBJS-$(CONFIG_CIESCOPE_FILTER)               += vf_ciescope.o
 OBJS-$(CONFIG_CODECVIEW_FILTER)              += vf_codecview.o
 OBJS-$(CONFIG_COLORBALANCE_FILTER)           += vf_colorbalance.o
 OBJS-$(CONFIG_COLORCHANNELMIXER_FILTER)      += vf_colorchannelmixer.o
+OBJS-$(CONFIG_COLORSTATS_FILTER)             += vf_colorstats.o
 OBJS-$(CONFIG_COLORKEY_FILTER)               += vf_colorkey.o
 OBJS-$(CONFIG_COLORKEY_OPENCL_FILTER)        += vf_colorkey_opencl.o opencl.o \
                                                 opencl/colorkey.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index c295f8e403..6b84a45452 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -172,6 +172,9 @@  extern AVFilter ff_vf_ciescope;
 extern AVFilter ff_vf_codecview;
 extern AVFilter ff_vf_colorbalance;
 extern AVFilter ff_vf_colorchannelmixer;
+extern AVFilter ff_vf_colorstats;
+extern AVFilter ff_vf_colorrgbstats;
+extern AVFilter ff_vf_coloryuvstats;
 extern AVFilter ff_vf_colorkey;
 extern AVFilter ff_vf_colorkey_opencl;
 extern AVFilter ff_vf_colorhold;
diff --git a/libavfilter/vf_colorstats.c b/libavfilter/vf_colorstats.c
new file mode 100644
index 0000000000..7e94c572f9
--- /dev/null
+++ b/libavfilter/vf_colorstats.c
@@ -0,0 +1,461 @@ 
+/*
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavutil/imgutils.h"
+#include "libavutil/opt.h"
+#include "libavutil/pixdesc.h"
+
+#include "avfilter.h"
+#include "drawutils.h"
+#include "filters.h"
+
+#define R 0
+#define G 1
+#define B 2
+#define A 3
+
+/* Payload handed to each slice-threading job. */
+typedef struct ThreadData {
+    AVFrame *in;  /* frame being analyzed; read-only for the workers */
+} ThreadData;
+
+/* Shared state for the colorstats/colorrgbstats/coloryuvstats filters.
+ * Statistics are accumulated per plane and per slice job (one array cell
+ * per job) and merged into frame metadata in report_detect_result(). */
+typedef struct ColorStatsContext {
+    const AVClass *class;
+
+    ptrdiff_t width[4];   /* per-plane width in pixels */
+    ptrdiff_t height[4];  /* per-plane height in pixels */
+
+    int planes;           /* bitmask of planes to report (user option) */
+    int step;             /* component stride per pixel for packed formats */
+    int bitdepth;         /* bits per component */
+    int nb_components;
+    int thread_count;     /* number of slice jobs; sizes the per-job arrays */
+    int is_16bit;         /* nonzero when bitdepth > 8 */
+    int is_rgb;
+    int force_fmt; /* 0: all, 1: rgb, 2: yuv */
+    uint8_t rgba_map[4];  /* component order for packed RGB formats */
+    char comps[4];        /* metadata key suffixes: r/g/b/a or y/u/v/a */
+
+    double *mean[4];      /* [plane][job] per-slice mean */
+    double *stdev[4];     /* [plane][job] per-slice standard deviation */
+
+    /* NOTE(review): config_input() computes this as 1 << (bitdepth - 1)
+     * (half scale) due to operator precedence; the p* metadata is divided
+     * by it and documented as 0..1, so full scale looks intended — verify. */
+    int max;
+    double *min_value[4]; /* [plane][job] per-slice minimum */
+    double *max_value[4]; /* [plane][job] per-slice maximum */
+
+    int  (*stats_slice)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
+} ColorStatsContext;
+
+#define OFFSET(x) offsetof(ColorStatsContext, x)
+#define V AV_OPT_FLAG_VIDEO_PARAM
+#define F AV_OPT_FLAG_FILTERING_PARAM
+
+/* Option table shared by all three filter variants.
+ * "planes" is a bitmask (one bit per plane/component); default 1 = first plane. */
+static const AVOption options[] = {
+    { "planes", "set planes to filter", OFFSET(planes), AV_OPT_TYPE_INT,  {.i64=1}, 1, 0xf, V|F},
+    {NULL}
+};
+
+/* Planar YUV and grayscale formats supported by coloryuvstats (and colorstats). */
+#define YUV_FORMATS                                                 \
+    AV_PIX_FMT_YUV444P,  AV_PIX_FMT_YUV422P,  AV_PIX_FMT_YUV420P,   \
+    AV_PIX_FMT_YUV411P,  AV_PIX_FMT_YUV410P,  AV_PIX_FMT_YUV440P,   \
+    AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ422P,                       \
+    AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ440P,                       \
+    AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV422P9,                       \
+    AV_PIX_FMT_YUV420P9,                                            \
+    AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV422P10,                     \
+    AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV440P10,                     \
+    AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV422P12,                     \
+    AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV440P12,                     \
+    AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV422P14,                     \
+    AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV444P16,                     \
+    AV_PIX_FMT_YUV422P16, AV_PIX_FMT_YUV420P16,                     \
+    AV_PIX_FMT_YUVA420P,  AV_PIX_FMT_YUVA422P,                      \
+    AV_PIX_FMT_YUVA444P16,AV_PIX_FMT_YUVA422P16,                    \
+    AV_PIX_FMT_YUVA420P16,AV_PIX_FMT_YUVA422P12,                    \
+    AV_PIX_FMT_YUVA444P12,AV_PIX_FMT_YUVA444P,                      \
+    AV_PIX_FMT_GRAY8,     AV_PIX_FMT_GRAY9,   AV_PIX_FMT_GRAY10,    \
+    AV_PIX_FMT_GRAY12,    AV_PIX_FMT_GRAY14,  AV_PIX_FMT_GRAY16
+
+/* Packed and planar RGB formats supported by colorrgbstats (and colorstats). */
+#define RGB_FORMATS                                                 \
+    AV_PIX_FMT_0RGB,      AV_PIX_FMT_0BGR,                          \
+    AV_PIX_FMT_RGB0,      AV_PIX_FMT_BGR0,                          \
+    AV_PIX_FMT_ARGB,      AV_PIX_FMT_RGBA,    AV_PIX_FMT_ABGR,      \
+    AV_PIX_FMT_BGRA,      AV_PIX_FMT_RGB24,   AV_PIX_FMT_BGR24,     \
+    AV_PIX_FMT_RGB48,     AV_PIX_FMT_RGBA64,  AV_PIX_FMT_GBRP,      \
+    AV_PIX_FMT_GBRAP,     AV_PIX_FMT_GBRP9,   AV_PIX_FMT_GBRP10,    \
+    AV_PIX_FMT_GBRAP10,   AV_PIX_FMT_GBRP12,  AV_PIX_FMT_GBRP14,    \
+    AV_PIX_FMT_GBRP16,    AV_PIX_FMT_GBRAP12, AV_PIX_FMT_GBRAP16
+
+/* Format lists selected in query_formats() according to force_fmt. */
+static const enum AVPixelFormat yuv_pix_fmts[] = { YUV_FORMATS, AV_PIX_FMT_NONE };
+static const enum AVPixelFormat rgb_pix_fmts[] = { RGB_FORMATS, AV_PIX_FMT_NONE };
+static const enum AVPixelFormat all_pix_fmts[] = { RGB_FORMATS, YUV_FORMATS, AV_PIX_FMT_NONE };
+
+/* Advertise the pixel formats matching this variant:
+ * force_fmt 1 = RGB only, 2 = YUV only, 0 = both. */
+static int query_formats(AVFilterContext *ctx)
+{
+    const ColorStatsContext *s = ctx->priv;
+    const enum AVPixelFormat *pix_fmts;
+    AVFilterFormats *fmts_list;
+
+    switch (s->force_fmt) {
+    case 1:
+        pix_fmts = rgb_pix_fmts;
+        break;
+    case 2:
+        pix_fmts = yuv_pix_fmts;
+        break;
+    default:
+        pix_fmts = all_pix_fmts;
+        break;
+    }
+
+    fmts_list = ff_make_format_list(pix_fmts);
+    if (!fmts_list)
+        return AVERROR(ENOMEM);
+    return ff_set_common_formats(ctx, fmts_list);
+}
+
+/* Per-slice statistics for planar formats.
+ * Fixes vs. v1: min/max tracked with src[x] (pixel index) instead of src[i]
+ * (plane index); min_value fully initialized (brace init only set slot 0);
+ * squares widened to 64 bits (16-bit samples overflow int); empty chroma
+ * slices (plane shorter than nb_jobs) no longer divide by zero. */
+#define DECLARE_STATS_PLANAR_FUNC(nbits, div)                                                     \
+static int stats_slice_planar_##nbits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)    \
+{                                                                                                 \
+    const ColorStatsContext *s = ctx->priv;                                                       \
+    ThreadData *td = arg;                                                                         \
+    const AVFrame *in = td->in;                                                                   \
+    int64_t sum[4] = { 0 }, sum2[4] = { 0 };                                                      \
+    int32_t count[4] = { 0 };                                                                     \
+    /* seed every plane's running minimum, not just the first one */                              \
+    double min_value[4] = { s->max, s->max, s->max, s->max };                                     \
+    double max_value[4] = { 0 };                                                                  \
+                                                                                                  \
+    for (int i = 0; i < s->nb_components; i++) {                                                  \
+        const int width = s->width[i];                                                            \
+        const int height = s->height[i];                                                          \
+        const int slice_start = (height *  jobnr     ) / nb_jobs;                                 \
+        const int slice_end   = (height * (jobnr + 1)) / nb_jobs;                                 \
+        int linesize = in->linesize[i] / div;                                                     \
+        uint##nbits##_t *src = (uint##nbits##_t*)in->data[i] + slice_start * linesize;            \
+                                                                                                  \
+        if (!(s->planes & (1 << i)))                                                              \
+            continue;                                                                             \
+        for (int j = slice_start; j < slice_end; j++) {                                           \
+            for (int x = 0; x < width; x++) {                                                     \
+                sum[i] += src[x];                                                                 \
+                /* widen before squaring: 65535^2 overflows a 32-bit int */                       \
+                sum2[i] += (int64_t)src[x] * src[x];                                              \
+                if (src[x] > max_value[i]) max_value[i] = src[x];                                 \
+                if (src[x] < min_value[i]) min_value[i] = src[x];                                 \
+            }                                                                                     \
+            count[i] += width;                                                                    \
+            src += linesize;                                                                      \
+        }                                                                                         \
+                                                                                                  \
+        /* subsampled planes may yield empty slices when nb_jobs > height */                      \
+        if (!count[i])                                                                            \
+            continue;                                                                             \
+        s->mean[i][jobnr] = (double)(sum[i] + count[i] / 2) / count[i];                           \
+        s->stdev[i][jobnr] = sqrt((sum2[i] - sum[i] * (double)sum[i] / count[i]) / count[i]);     \
+        s->min_value[i][jobnr] = min_value[i];                                                    \
+        s->max_value[i][jobnr] = max_value[i];                                                    \
+    }                                                                                             \
+                                                                                                  \
+    return 0;                                                                                     \
+}
+DECLARE_STATS_PLANAR_FUNC(8, 1)
+DECLARE_STATS_PLANAR_FUNC(16, 2)
+
+/* Per-slice statistics for packed RGB(A) formats.
+ * Fixes vs. v1: min_value fully initialized (brace init only set slot 0);
+ * squares widened to 64 bits (16-bit samples overflow int); the alpha
+ * component is read only for 4-component formats, so the last pixel of a
+ * 3-component row no longer reads one component past its end; variance is
+ * clamped at 0 before sqrt() to absorb floating-point rounding. */
+#define DECLARE_STATS_PACKED_FUNC(nbits, div)                                                     \
+static int stats_slice_packed_##nbits(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)    \
+{                                                                                                 \
+    const ColorStatsContext *s = ctx->priv;                                                       \
+    ThreadData *td = arg;                                                                         \
+    const AVFrame *in = td->in;                                                                   \
+    int64_t sum[4] = { 0 }, sum2[4] = { 0 };                                                      \
+    double min_value[4] = { s->max, s->max, s->max, s->max };                                     \
+    double max_value[4] = { 0 };                                                                  \
+    int32_t count[4] = { 0 };                                                                     \
+    const int width = in->width;                                                                  \
+    const int height = in->height;                                                                \
+    const int slice_start = (height *  jobnr     ) / nb_jobs;                                     \
+    const int slice_end   = (height * (jobnr + 1)) / nb_jobs;                                     \
+    int linesize = in->linesize[0] / div;                                                         \
+    uint##nbits##_t *src = (uint##nbits##_t*)in->data[0] + slice_start * linesize;                \
+    const uint8_t ro = s->rgba_map[R];                                                            \
+    const uint8_t go = s->rgba_map[G];                                                            \
+    const uint8_t bo = s->rgba_map[B];                                                            \
+    const uint8_t ao = s->rgba_map[A];                                                            \
+                                                                                                  \
+    for (int y = slice_start; y < slice_end; y++) {                                               \
+        for (int x = 0; x < width * s->step; x += s->step) {                                      \
+            const int r = src[x + ro];                                                            \
+            const int g = src[x + go];                                                            \
+            const int b = src[x + bo];                                                            \
+                                                                                                  \
+            sum[ro] += r;                                                                         \
+            sum[go] += g;                                                                         \
+            sum[bo] += b;                                                                         \
+            /* widen before squaring: 65535^2 overflows a 32-bit int */                           \
+            sum2[ro] += (int64_t)r * r;                                                           \
+            sum2[go] += (int64_t)g * g;                                                           \
+            sum2[bo] += (int64_t)b * b;                                                           \
+                                                                                                  \
+            if (r > max_value[ro]) max_value[ro] = r;                                             \
+            if (r < min_value[ro]) min_value[ro] = r;                                             \
+            if (g > max_value[go]) max_value[go] = g;                                             \
+            if (g < min_value[go]) min_value[go] = g;                                             \
+            if (b > max_value[bo]) max_value[bo] = b;                                             \
+            if (b < min_value[bo]) min_value[bo] = b;                                             \
+            if (s->step == 4) {                                                                   \
+                /* only 4-component formats have an alpha slot to read */                         \
+                const int a = src[x + ao];                                                        \
+                sum[ao] += a;                                                                     \
+                sum2[ao] += (int64_t)a * a;                                                       \
+                if (a > max_value[ao]) max_value[ao] = a;                                         \
+                if (a < min_value[ao]) min_value[ao] = a;                                         \
+            }                                                                                     \
+        }                                                                                         \
+        count[ro] += width;                                                                       \
+        count[go] += width;                                                                       \
+        count[bo] += width;                                                                       \
+        if (s->step == 4)                                                                         \
+            count[ao] += width;                                                                   \
+        src += linesize;                                                                          \
+    }                                                                                             \
+                                                                                                  \
+    for (int p = 0; p < s->nb_components; p++) {                                                  \
+        int ci = s->is_rgb ? s->rgba_map[p] : p;                                                  \
+        double variance;                                                                          \
+                                                                                                  \
+        if (!count[ci])                                                                           \
+            continue;                                                                             \
+        s->mean[ci][jobnr] = (double)(sum[ci] + count[ci] / 2) / count[ci];                       \
+        variance = (sum2[ci] - sum[ci] * (double)sum[ci] / count[ci]) / count[ci];                \
+        /* clamp tiny negative rounding residue before sqrt() */                                  \
+        s->stdev[ci][jobnr] = sqrt(variance > 0 ? variance : 0);                                  \
+        s->min_value[ci][jobnr] = min_value[ci];                                                  \
+        s->max_value[ci][jobnr] = max_value[ci];                                                  \
+    }                                                                                             \
+                                                                                                  \
+    return 0;                                                                                     \
+}
+DECLARE_STATS_PACKED_FUNC(8, 1)
+DECLARE_STATS_PACKED_FUNC(16, 2)
+
+static av_cold void uninit(AVFilterContext *ctx)
+{
+    ColorStatsContext *s = ctx->priv;
+
+    /* Free all four slots unconditionally: av_freep(NULL) is a no-op and
+     * nb_components is 0 when the input was never configured, which would
+     * otherwise skip the loop entirely. */
+    for (int i = 0; i < 4; i++) {
+        av_freep(&s->mean[i]);
+        av_freep(&s->stdev[i]);
+        av_freep(&s->min_value[i]);
+        av_freep(&s->max_value[i]);
+    }
+}
+
+/* Cache format-dependent parameters, allocate the per-thread accumulator
+ * arrays and select the slice worker for the negotiated pixel format. */
+static int config_input(AVFilterLink *inlink)
+{
+    AVFilterContext *ctx = inlink->dst;
+    ColorStatsContext *s = ctx->priv;
+    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(inlink->format);
+
+    s->nb_components = desc->nb_components;
+    s->bitdepth = desc->comp[0].depth;
+    s->is_16bit = s->bitdepth > 8;
+    s->step = av_get_padded_bits_per_pixel(desc) >> (3 + s->is_16bit);
+    /* full-scale component value used to normalize the p* metadata to 0..1;
+     * v1 wrote "1 << s->bitdepth - 1", which precedence turns into half
+     * scale (128 for 8-bit) and breaks both the pmin/pmax/pmean range and
+     * the minimum seeding below */
+    s->max  = (1 << s->bitdepth) - 1;
+
+    s->is_rgb = ff_fill_rgba_map(s->rgba_map, inlink->format) >= 0;
+    s->comps[0] = s->is_rgb ? 'r' : 'y' ;
+    s->comps[1] = s->is_rgb ? 'g' : 'u' ;
+    s->comps[2] = s->is_rgb ? 'b' : 'v' ;
+    s->comps[3] = 'a';
+
+    /* never schedule more jobs than luma rows */
+    s->thread_count = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
+    for (int i = 0; i < s->nb_components; i++) {
+        ptrdiff_t line_size = av_image_get_linesize(inlink->format, inlink->w, i);
+
+        /* bytes -> samples for >8-bit formats */
+        s->width[i] = line_size >> (s->bitdepth > 8);
+        s->height[i] = inlink->h >> ((i == 1 || i == 2) ? desc->log2_chroma_h : 0);
+
+        s->mean[i] = av_mallocz_array(s->thread_count, sizeof(*s->mean[i]));
+        s->stdev[i] = av_mallocz_array(s->thread_count, sizeof(*s->stdev[i]));
+        s->max_value[i] = av_mallocz_array(s->thread_count, sizeof(*s->max_value[i]));
+        s->min_value[i] = av_mallocz_array(s->thread_count, sizeof(*s->min_value[i]));
+        if (!s->mean[i] || !s->stdev[i] || !s->max_value[i] || !s->min_value[i])
+            return AVERROR(ENOMEM);
+        for (int j = 0; j < s->thread_count; j++) {
+            /* seed per-job minima at full scale so any sample lowers them */
+            s->min_value[i][j] = s->max;
+            s->max_value[i][j] = 0;
+        }
+    }
+
+    if (desc->flags & AV_PIX_FMT_FLAG_PLANAR)
+        s->stats_slice = s->bitdepth <= 8 ? stats_slice_planar_8 : stats_slice_planar_16;
+    else
+        s->stats_slice = s->bitdepth <= 8 ? stats_slice_packed_8 : stats_slice_packed_16;
+
+    return 0;
+}
+
+/* Store a float value under "lavf.colorstats.<key>[.<c>]" with 2 decimals. */
+static void set_meta_float(AVDictionary **metadata, const char *key, char c, float d)
+{
+    char full_key[128];
+    char buf[128];
+
+    if (c)
+        snprintf(full_key, sizeof(full_key), "lavf.colorstats.%s.%c", key, c);
+    else
+        snprintf(full_key, sizeof(full_key), "lavf.colorstats.%s", key);
+    snprintf(buf, sizeof(buf), "%.2f", d);
+    av_dict_set(metadata, full_key, buf, 0);
+}
+
+/* Store an integer value under "lavf.colorstats.<key>[.<c>]". */
+static void set_meta_int(AVDictionary **metadata, const char *key, char c, int d)
+{
+    char full_key[128];
+    char buf[128];
+
+    if (c)
+        snprintf(full_key, sizeof(full_key), "lavf.colorstats.%s.%c", key, c);
+    else
+        snprintf(full_key, sizeof(full_key), "lavf.colorstats.%s", key);
+    snprintf(buf, sizeof(buf), "%d", d);
+    av_dict_set(metadata, full_key, buf, 0);
+}
+
+/* Merge the per-slice results into whole-frame values and attach them as
+ * frame metadata (absolute and full-scale-normalized "p*" variants).
+ * Fix vs. v1: min_value was brace-initialized with a single element, so
+ * components 1..3 started at 0 and the reported minima were stuck at 0. */
+static void report_detect_result(AVFilterContext *ctx, AVFrame *in)
+{
+    const ColorStatsContext *s = ctx->priv;
+    double mean[4] = { 0 };
+    double stdev[4] = { 0 };
+    double min_value[4] = { s->max, s->max, s->max, s->max };
+    double max_value[4] = { 0 };
+    int cidx;
+
+    for (int p = 0; p < s->nb_components; p++) {
+        cidx = s->is_rgb ? s->rgba_map[p] : p;
+
+        if (!(s->planes & (1 << p)))
+            continue;
+
+        for (int j = 0; j < s->thread_count; j++) {
+            mean[cidx] += s->mean[cidx][j];
+            stdev[cidx] += s->stdev[cidx][j];
+            if (s->min_value[cidx][j] < min_value[cidx])
+                min_value[cidx] = s->min_value[cidx][j];
+            if (s->max_value[cidx][j] > max_value[cidx])
+                max_value[cidx] = s->max_value[cidx][j];
+        }
+        /* NOTE(review): equal-weight average of per-slice values; slices can
+         * differ by a row, and averaging per-slice stdevs only approximates
+         * the frame stdev -- an exact merge needs per-slice counts (see the
+         * parallel variance algorithm). Confirm this precision is acceptable. */
+        mean[cidx] = mean[cidx] / s->thread_count;
+        stdev[cidx] = stdev[cidx] / s->thread_count;
+
+        set_meta_int(&in->metadata, "min", s->comps[p], min_value[cidx]);
+        set_meta_int(&in->metadata, "max", s->comps[p], max_value[cidx]);
+        set_meta_int(&in->metadata, "mean", s->comps[p], mean[cidx]);
+        set_meta_int(&in->metadata, "stdev", s->comps[p], stdev[cidx]);
+
+        set_meta_float(&in->metadata, "pmin", s->comps[p], min_value[cidx] / s->max);
+        set_meta_float(&in->metadata, "pmax", s->comps[p], max_value[cidx] / s->max);
+        set_meta_float(&in->metadata, "pmean", s->comps[p], mean[cidx] / s->max);
+        set_meta_float(&in->metadata, "pstdev", s->comps[p], stdev[cidx] / s->max);
+    }
+}
+
+/* Pull one frame from the input, run the slice workers over it, attach the
+ * statistics as metadata and pass the frame through unchanged. */
+static int activate(AVFilterContext *ctx)
+{
+    int ret;
+    AVFilterLink *inlink = ctx->inputs[0];
+    AVFilterLink *outlink = ctx->outputs[0];
+    ColorStatsContext *s = ctx->priv;
+    AVFrame *in;
+    ThreadData td;
+
+    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
+
+    ret = ff_inlink_consume_frame(inlink, &in);
+    if (ret < 0)
+        return ret;
+
+    if (in) {
+        td.in = in;
+        /* run stats_slice over thread_count horizontal slices */
+        ctx->internal->execute(ctx, s->stats_slice, &td, NULL, s->thread_count);
+
+        report_detect_result(ctx, in);
+        return ff_filter_frame(outlink, in);
+    }
+
+    /* no frame available: propagate EOF/status and request more input */
+    FF_FILTER_FORWARD_STATUS(inlink, outlink);
+    FF_FILTER_FORWARD_WANTED(outlink, inlink);
+
+    return FFERROR_NOT_READY;
+}
+
+/* Single video input; config_input caches format parameters. */
+static const AVFilterPad inputs[] = {
+    {
+        .name         = "default",
+        .type         = AVMEDIA_TYPE_VIDEO,
+        .config_props = config_input,
+    },
+    { NULL }
+};
+
+/* Single video output; frames pass through unmodified. */
+static const AVFilterPad outputs[] = {
+    {
+        .name          = "default",
+        .type          = AVMEDIA_TYPE_VIDEO,
+    },
+    { NULL }
+};
+
+/* Stamp out one AVFilter definition per variant; the variants differ only
+ * in their init function (which selects force_fmt) and their class. */
+#define DEFINE_COLOR_FILTER(name_, description_)                        \
+    AVFilter ff_vf_##name_ = {                                          \
+        .name          = #name_,                                        \
+        .description   = NULL_IF_CONFIG_SMALL(description_),            \
+        .priv_size     = sizeof(ColorStatsContext),                     \
+        .priv_class    = &name_ ## _class,                              \
+        .init          = name_##_init,                                  \
+        .uninit        = uninit,                                        \
+        .query_formats = query_formats,                                 \
+        .inputs        = inputs,                                        \
+        .outputs       = outputs,                                       \
+        .activate      = activate,                                      \
+        .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC |       \
+                         AVFILTER_FLAG_SLICE_THREADS,                   \
+    }
+
+#if CONFIG_COLORSTATS_FILTER
+
+#define colorstats_options options
+AVFILTER_DEFINE_CLASS(colorstats);
+
+/* colorstats: accepts both RGB and YUV input (force_fmt stays 0). */
+static int colorstats_init(AVFilterContext *ctx)
+{
+    return 0;
+}
+
+DEFINE_COLOR_FILTER(colorstats, "Video color stats.");
+#endif
+
+#if CONFIG_COLORRGBSTATS_FILTER
+
+#define colorrgbstats_options options
+AVFILTER_DEFINE_CLASS(colorrgbstats);
+
+/* colorrgbstats: restrict format negotiation to RGB formats. */
+static int colorrgbstats_init(AVFilterContext *ctx)
+{
+    ColorStatsContext *s = ctx->priv;
+
+    s->force_fmt = 1;
+    return 0;
+}
+
+DEFINE_COLOR_FILTER(colorrgbstats, "Video RGB color stats.");
+#endif
+
+#if CONFIG_COLORYUVSTATS_FILTER
+
+#define coloryuvstats_options options
+AVFILTER_DEFINE_CLASS(coloryuvstats);
+
+/* coloryuvstats: restrict format negotiation to YUV/gray formats. */
+static int coloryuvstats_init(AVFilterContext *ctx)
+{
+    ColorStatsContext *s = ctx->priv;
+
+    s->force_fmt = 2;
+    return 0;
+}
+
+DEFINE_COLOR_FILTER(coloryuvstats, "Video YUV color stats.");
+#endif