
[FFmpeg-devel] libavfilter/vf_signalstats.c: add new HSL (Hue, Saturation, Lightness) metrics for the signalstats filter

Message ID 20230226150256.7940-1-yizhuo.liu753@gmail.com
State New
Series [FFmpeg-devel] libavfilter/vf_signalstats.c: add new HSL (Hue, Saturation, Lightness) metrics for the signalstats filter

Checks

Context Check Description
yinshiyou/make_fate_loongarch64 fail Make fate failed
yinshiyou/make_loongarch64 warning New warnings during build
andriy/make_fate_x86 fail Make fate failed
andriy/make_x86 warning New warnings during build

Commit Message

yizhuo liu Feb. 26, 2023, 3:02 p.m. UTC
From: "yizhuo.liu753@gmail.com" <yizhuo.liu753@gmail.com>

---
 libavfilter/vf_signalstats.c | 244 +++++++++++++++++++++++++++++++----
 1 file changed, 219 insertions(+), 25 deletions(-)
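
As a hedged illustration of what the patch computes (see RGB2HSL in the diff below): each frame is converted to RGB24 via libswscale, every pixel is mapped to double-hexcone HSL, and the per-frame averages are exported as the HAVG/SAVG/LAVG frame metadata. A minimal standalone sketch of that per-pixel mapping, using illustrative names and a sample value, with sRGB input scaled to [0,1]:

    #include <math.h>
    #include <stdio.h>

    /* Standalone sketch (not part of the patch) of the sRGB -> HSL mapping
     * implemented by RGB2HSL in the diff:
     *   L = (max + min) / 2,  C = max - min,
     *   S = C / (2L) if L <= 1/2, else C / (2 - 2L),
     *   H = 60 * hue sector, 0 <= H < 360. */
    static void rgb_to_hsl(double r, double g, double b,
                           double *h, double *s, double *l)
    {
        double max = fmax(r, fmax(g, b));
        double min = fmin(r, fmin(g, b));
        double c   = max - min;

        *l = (max + min) / 2.0;
        if (c <= 0.0) {              /* achromatic pixel: hue and saturation set to 0 */
            *h = *s = 0.0;
            return;
        }
        if (max == r)
            *h = fmod((g - b) / c + 6.0, 6.0);
        else if (max == g)
            *h = (b - r) / c + 2.0;
        else
            *h = (r - g) / c + 4.0;
        *h *= 60.0;
        *s = (*l <= 0.5) ? c / (2.0 * *l) : c / (2.0 - 2.0 * *l);
    }

    int main(void)
    {
        double h, s, l;
        /* Pure red (255, 0, 0) scaled to [0,1] should give H=0, S=1, L=0.5. */
        rgb_to_hsl(1.0, 0.0, 0.0, &h, &s, &l);
        printf("H=%.1f S=%.2f L=%.2f\n", h, s, l);
        return 0;
    }

Compile with -lm; averaging these values over every pixel of a row and then over all rows yields the HAVG/SAVG/LAVG values reported by the patch.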

Comments

Paul B Mahol Feb. 26, 2023, 8:04 p.m. UTC | #1
On 2/26/23, liuyizhuo <yizhuo.liu753@gmail.com> wrote:
> From: "yizhuo.liu753@gmail.com" <yizhuo.liu753@gmail.com>
>
> ---
>  libavfilter/vf_signalstats.c | 244 +++++++++++++++++++++++++++++++----
>  1 file changed, 219 insertions(+), 25 deletions(-)
>
> diff --git a/libavfilter/vf_signalstats.c b/libavfilter/vf_signalstats.c
> index b4d1029296..90dbe853fa 100644
> --- a/libavfilter/vf_signalstats.c
> +++ b/libavfilter/vf_signalstats.c
> @@ -1,7 +1,9 @@
>  /*
>   * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
> - * Copyright (c) 2014 Clément Bœsch
> + * Copyright (c) 2014 Clément Bœsch

Looks like not valid change.



Patch

diff --git a/libavfilter/vf_signalstats.c b/libavfilter/vf_signalstats.c
index b4d1029296..90dbe853fa 100644
--- a/libavfilter/vf_signalstats.c
+++ b/libavfilter/vf_signalstats.c
@@ -1,7 +1,9 @@ 
 /*
  * Copyright (c) 2010 Mark Heath mjpeg0 @ silicontrip dot org
- * Copyright (c) 2014 Clément Bœsch
+ * Copyright (c) 2014 Clément Bœsch
  * Copyright (c) 2014 Dave Rice @dericed
+ * Copyright (c) 2022 Wang Wei <wangwei1237@gmail.com>
+ * Copyright (c) 2022 Liu yizhuo  <yizhuo.liu753@gmail.com>
  *
  * This file is part of FFmpeg.
  *
@@ -23,8 +25,8 @@ 
 #include "libavutil/intreadwrite.h"
 #include "libavutil/opt.h"
 #include "libavutil/pixdesc.h"
-#include "filters.h"
 #include "internal.h"
+#include "libswscale/swscale.h"
 
 enum FilterMode {
     FILTER_NONE = -1,
@@ -36,6 +38,8 @@  enum FilterMode {
 
 typedef struct SignalstatsContext {
     const AVClass *class;
+    int lumah;      // height of luma plane
+    int lumaw;      // width of luma plane
     int chromah;    // height of chroma plane
     int chromaw;    // width of chroma plane
     int hsub;       // horizontal subsampling
@@ -56,6 +60,11 @@  typedef struct SignalstatsContext {
 
     AVFrame *frame_sat;
     AVFrame *frame_hue;
+    AVFrame *frame_rgb;
+
+    int *hsl_h;
+    int *hsl_s;
+    int *hsl_l;
 } SignalstatsContext;
 
 typedef struct ThreadData {
@@ -65,9 +74,21 @@  typedef struct ThreadData {
 
 typedef struct ThreadDataHueSatMetrics {
     const AVFrame *src;
-    AVFrame *dst_sat, *dst_hue;
+    AVFrame *dst_sat, *dst_hue, *dst_h, *dst_s, *dst_l;
 } ThreadDataHueSatMetrics;
 
+typedef struct ThreadDataHSLMetrics {
+    const AVFrame *src;
+    int *dst_h, *dst_s, *dst_l;
+} ThreadDataHSLMetrics;
+
+typedef float num;
+
+static const float EPSILON = 1e-9;
+
+/** @brief Approximate equality of A and B */
+#define EQ(A,B)    ((fabs((A) - (B)) < EPSILON) ? 1 : 0)
+
 #define OFFSET(x) offsetof(SignalstatsContext, x)
 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
 
@@ -101,6 +122,7 @@  static av_cold int init(AVFilterContext *ctx)
     s->yuv_color[0] = (( 66*r + 129*g +  25*b + (1<<7)) >> 8) +  16;
     s->yuv_color[1] = ((-38*r + -74*g + 112*b + (1<<7)) >> 8) + 128;
     s->yuv_color[2] = ((112*r + -94*g + -18*b + (1<<7)) >> 8) + 128;
+
     return 0;
 }
 
@@ -110,11 +132,15 @@  static av_cold void uninit(AVFilterContext *ctx)
     av_frame_free(&s->frame_prev);
     av_frame_free(&s->frame_sat);
     av_frame_free(&s->frame_hue);
+    av_frame_free(&s->frame_rgb);
     av_freep(&s->jobs_rets);
     av_freep(&s->histy);
     av_freep(&s->histu);
     av_freep(&s->histv);
     av_freep(&s->histsat);
+    av_freep(&s->hsl_h);
+    av_freep(&s->hsl_s);
+    av_freep(&s->hsl_l);
 }
 
 // TODO: add more
@@ -151,6 +177,23 @@  static AVFrame *alloc_frame(enum AVPixelFormat pixfmt, int w, int h)
     return frame;
 }
 
+static int config_input(AVFilterLink *inlink)
+{
+    // Video input data available
+    AVFilterContext *ctx = inlink->dst;
+    SignalstatsContext *s = ctx->priv;
+
+    // free previous buffers in case they are allocated already
+    av_frame_free(&s->frame_rgb);
+    s->frame_rgb = alloc_frame(AV_PIX_FMT_RGB24, inlink->w, inlink->h);
+
+    if (!s->frame_rgb) {
+        return AVERROR(ENOMEM);
+    }
+
+    return 0;
+}
+
 static int config_output(AVFilterLink *outlink)
 {
     AVFilterContext *ctx = outlink->src;
@@ -172,12 +215,22 @@  static int config_output(AVFilterLink *outlink)
     outlink->w = inlink->w;
     outlink->h = inlink->h;
 
+    s->lumaw   = inlink->w;
+    s->lumah   = inlink->h;
+
     s->chromaw = AV_CEIL_RSHIFT(inlink->w, s->hsub);
     s->chromah = AV_CEIL_RSHIFT(inlink->h, s->vsub);
 
     s->fs = inlink->w * inlink->h;
     s->cfs = s->chromaw * s->chromah;
 
+    s->hsl_h = av_malloc_array(s->lumah, sizeof(*s->hsl_h));
+    s->hsl_s = av_malloc_array(s->lumah, sizeof(*s->hsl_s));
+    s->hsl_l = av_malloc_array(s->lumah, sizeof(*s->hsl_l));
+    if (!s->hsl_h || !s->hsl_s || !s->hsl_l) {
+        return AVERROR(ENOMEM);
+    }
+
     s->nb_jobs   = FFMAX(1, FFMIN(inlink->h, ff_filter_get_nb_threads(ctx)));
     s->jobs_rets = av_malloc_array(s->nb_jobs, sizeof(*s->jobs_rets));
     if (!s->jobs_rets)
@@ -455,6 +508,110 @@  static const struct {
     {NULL}
 };
 
+static void YUV2RGB(const AVFrame* src, enum AVPixelFormat dstFormat, AVFrame* dst) 
+{
+    int width  = src->width;
+    int height = src->height;
+
+    struct SwsContext* conversion = NULL;
+    conversion = sws_getContext(width,
+                                height,
+                                (enum AVPixelFormat)src->format,
+                                width,
+                                height,
+                                dstFormat,
+                                SWS_FAST_BILINEAR,
+                                NULL,
+                                NULL,
+                                NULL);
+    sws_scale(conversion, (const uint8_t * const *)src->data, src->linesize, 0, height, dst->data, dst->linesize);
+    sws_freeContext(conversion);
+}
+
+/** 
+ * @brief Convert an sRGB color to Hue-Saturation-Lightness (HSL)
+ * 
+ * @param H, S, L pointers to hold the result
+ * @param R, G, B the input sRGB values scaled in [0,1]
+ *
+ * This routine transforms from sRGB to the double hexcone HSL color space.
+ * The sRGB values are assumed to be between 0 and 1.  The outputs are
+ *   H = hexagonal hue angle                (0 <= H < 360),
+ *   S = { C/(2L)     if L <= 1/2           (0 <= S <= 1),
+ *       { C/(2 - 2L) if L >  1/2
+ *   L = (max(R',G',B') + min(R',G',B'))/2  (0 <= L <= 1),
+ * where C = max(R',G',B') - min(R',G',B').
+ *
+ * Wikipedia: http://en.wikipedia.org/wiki/HSL_and_HSV
+ */
+static void RGB2HSL(num *H, num *S, num *L, num R, num G, num B)
+{
+    num Max = FFMAX3(R, G, B);
+    num Min = FFMIN3(R, G, B);
+    num C = Max - Min;
+
+    *L = (Max + Min) / 2;
+
+    if (C > 0) {
+        if (EQ(Max, R)) {
+            *H = (G - B) / C;
+
+            if (G < B) {
+                *H += 6;
+            }
+        } else if (EQ(Max, G)) {
+            *H = 2 + (B - R) / C;
+        } else {
+            *H = 4 + (R - G) / C;
+        }
+
+        *H *= 60;
+        *S = (*L <= 0.5) ? (C/(2*(*L))) : (C/(2 - 2*(*L)));
+    } else {
+        *H = *S = 0;
+    }
+}
+
+static int compute_hsl(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs) 
+{
+    int i, j;
+    num H, S, L;
+    ThreadDataHSLMetrics *td = arg;
+    const SignalstatsContext *s = ctx->priv;
+    const AVFrame *src = td->src;
+    int *dst_h = td->dst_h;
+    int *dst_s = td->dst_s;
+    int *dst_l = td->dst_l;
+
+    const int slice_start = (s->lumah * jobnr    ) / nb_jobs;
+    const int slice_end   = (s->lumah * (jobnr+1)) / nb_jobs;
+
+    const int lsz_src = src->linesize[0];
+    const uint8_t *p_src = src->data[0] + slice_start * lsz_src;
+
+    for (j = slice_start; j < slice_end; j++) {
+        int line_h = 0, line_s = 0, line_l = 0;
+        for (i = 0; i < s->lumaw; i++) {
+            const uint8_t rgbr = p_src[3 * i];
+            const uint8_t rgbg = p_src[3 * i + 1];
+            const uint8_t rgbb = p_src[3 * i + 2];
+
+            RGB2HSL(&H, &S, &L, 1.0 * rgbr / 255, 1.0 * rgbg / 255, 1.0 * rgbb / 255);
+            line_h += (uint16_t)(H + 0.5);
+            line_s += (uint8_t)(S * 100 + 0.5);
+            line_l += (uint8_t)(L * 100 + 0.5);
+        }
+
+        dst_h[j] = line_h / s->lumaw;
+        dst_s[j] = line_s / s->lumaw;
+        dst_l[j] = line_l / s->lumaw;
+
+        p_src += lsz_src;
+    }
+
+    return 0;
+}
+
 static int compute_sat_hue_metrics8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
 {
     int i, j;
@@ -562,11 +719,12 @@  static int filter_frame8(AVFilterLink *link, AVFrame *in)
     int accy, accu, accv;
     int accsat, acchue = 0;
     int medhue, maxhue;
+    int avgh = 0, avgs = 0, avgl = 0;
     int toty = 0, totu = 0, totv = 0, totsat=0;
     int tothue = 0;
     int dify = 0, difu = 0, difv = 0;
     uint16_t masky = 0, masku = 0, maskv = 0;
-    int ret;
+
     int filtot[FILT_NUMB] = {0};
     AVFrame *prev;
 
@@ -589,21 +747,34 @@  static int filter_frame8(AVFilterLink *link, AVFrame *in)
 
     if (s->outfilter != FILTER_NONE) {
         out = av_frame_clone(in);
-        if (!out) {
-            av_frame_free(&in);
-            return AVERROR(ENOMEM);
-        }
-        ret = ff_inlink_make_frame_writable(link, &out);
-        if (ret < 0) {
-            av_frame_free(&out);
-            av_frame_free(&in);
-            return ret;
-        }
+        av_frame_make_writable(out);
     }
 
     ff_filter_execute(ctx, compute_sat_hue_metrics8, &td_huesat,
                       NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
 
+    // Calculate HSL information.
+    YUV2RGB(in, AV_PIX_FMT_RGB24, s->frame_rgb);
+    ThreadDataHSLMetrics td_hsl = {
+        .src   = s->frame_rgb,
+        .dst_h = s->hsl_h,
+        .dst_s = s->hsl_s,
+        .dst_l = s->hsl_l,
+    };
+    ff_filter_execute(ctx, compute_hsl, &td_hsl,
+                      NULL, FFMIN(link->h, ff_filter_get_nb_threads(ctx)));
+
+    int sumh = 0, sums = 0, suml = 0;
+    for (j = 0; j < s->lumah; j++) {
+        sumh += s->hsl_h[j];
+        sums += s->hsl_s[j];
+        suml += s->hsl_l[j];
+    }
+
+    avgh = sumh / s->lumah;
+    avgs = sums / s->lumah;
+    avgl = suml / s->lumah;
+
     // Calculate luma histogram and difference with previous frame or field.
     memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
     for (j = 0; j < link->h; j++) {
@@ -746,6 +917,10 @@  static int filter_frame8(AVFilterLink *link, AVFrame *in)
     SET_META("HUEMED",  "%d", medhue);
     SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);
 
+    SET_META("HAVG",    "%d", avgh);
+    SET_META("SAVG",    "%d", avgs);
+    SET_META("LAVG",    "%d", avgl);
+
     SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
     SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
     SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);
@@ -793,6 +968,7 @@  static int filter_frame16(AVFilterLink *link, AVFrame *in)
     int accy, accu, accv;
     int accsat, acchue = 0;
     int medhue, maxhue;
+    int avgh = 0, avgs = 0, avgl = 0;
     int64_t toty = 0, totu = 0, totv = 0, totsat=0;
     int64_t tothue = 0;
     int64_t dify = 0, difu = 0, difv = 0;
@@ -800,7 +976,7 @@  static int filter_frame16(AVFilterLink *link, AVFrame *in)
 
     int filtot[FILT_NUMB] = {0};
     AVFrame *prev;
-    int ret;
+
     AVFrame *sat = s->frame_sat;
     AVFrame *hue = s->frame_hue;
     const uint16_t *p_sat = (uint16_t *)sat->data[0];
@@ -820,21 +996,34 @@  static int filter_frame16(AVFilterLink *link, AVFrame *in)
 
     if (s->outfilter != FILTER_NONE) {
         out = av_frame_clone(in);
-        if (!out) {
-            av_frame_free(&in);
-            return AVERROR(ENOMEM);
-        }
-        ret = ff_inlink_make_frame_writable(link, &out);
-        if (ret < 0) {
-            av_frame_free(&out);
-            av_frame_free(&in);
-            return ret;
-        }
+        av_frame_make_writable(out);
     }
 
     ff_filter_execute(ctx, compute_sat_hue_metrics16, &td_huesat,
                       NULL, FFMIN(s->chromah, ff_filter_get_nb_threads(ctx)));
 
+    // Calculate HSL information.
+    YUV2RGB(in, AV_PIX_FMT_RGB24, s->frame_rgb);
+    ThreadDataHSLMetrics td_hsl = {
+        .src   = s->frame_rgb,
+        .dst_h = s->hsl_h,
+        .dst_s = s->hsl_s,
+        .dst_l = s->hsl_l,
+    };
+    ff_filter_execute(ctx, compute_hsl, &td_hsl,
+                      NULL, FFMIN(link->h, ff_filter_get_nb_threads(ctx)));
+
+    int sumh = 0, sums = 0, suml = 0;
+    for (j = 0; j < s->lumah; j++) {
+        sumh += s->hsl_h[j];
+        sums += s->hsl_s[j];
+        suml += s->hsl_l[j];
+    }
+
+    avgh = sumh / s->lumah;
+    avgs = sums / s->lumah;
+    avgl = suml / s->lumah;
+
     // Calculate luma histogram and difference with previous frame or field.
     memset(s->histy, 0, s->maxsize * sizeof(*s->histy));
     for (j = 0; j < link->h; j++) {
@@ -972,6 +1161,10 @@  static int filter_frame16(AVFilterLink *link, AVFrame *in)
     SET_META("HUEMED",  "%d", medhue);
     SET_META("HUEAVG",  "%g", 1.0 * tothue / s->cfs);
 
+    SET_META("HAVG",    "%d", avgh);
+    SET_META("SAVG",    "%d", avgs);
+    SET_META("LAVG",    "%d", avgl);
+
     SET_META("YDIF",    "%g", 1.0 * dify / s->fs);
     SET_META("UDIF",    "%g", 1.0 * difu / s->cfs);
     SET_META("VDIF",    "%g", 1.0 * difv / s->cfs);
@@ -1009,6 +1202,7 @@  static const AVFilterPad signalstats_inputs[] = {
     {
         .name           = "default",
         .type           = AVMEDIA_TYPE_VIDEO,
+        .config_props   = config_input,
         .filter_frame   = filter_frame,
     },
 };
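
For context on how the new values surface downstream (assuming the patch is applied): SET_META in vf_signalstats.c stores each value in the output frame's metadata with the lavfi.signalstats. prefix, so the new averages appear per frame as lavfi.signalstats.HAVG, lavfi.signalstats.SAVG and lavfi.signalstats.LAVG, alongside the existing YAVG/HUEAVG keys. A minimal sketch of reading them from an application that already has the filtered AVFrame (the helper name is illustrative):

    #include <stdio.h>
    #include <libavutil/dict.h>
    #include <libavutil/frame.h>

    /* Illustrative helper (not part of the patch): print the per-frame HSL
     * averages that the patch exports via SET_META.  signalstats frame tags
     * are prefixed with "lavfi.signalstats.". */
    static void print_hsl_averages(const AVFrame *frame)
    {
        static const char *const keys[] = {
            "lavfi.signalstats.HAVG",
            "lavfi.signalstats.SAVG",
            "lavfi.signalstats.LAVG",
        };
        for (size_t i = 0; i < sizeof(keys) / sizeof(keys[0]); i++) {
            const AVDictionaryEntry *e =
                av_dict_get(frame->metadata, keys[i], NULL, 0);
            if (e)
                printf("%s=%s\n", e->key, e->value);
        }
    }

The same tags can also be inspected from the command line with ffprobe's -show_entries frame_tags=... option.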