
[FFmpeg-devel,GSOC,1/3] lavc/cfhd: 3D transform decoding for both progressive and interlaced

Message ID 20180814073329.7257-1-deepgagan231197@gmail.com
State Superseded

Commit Message

Gagandeep Singh Aug. 14, 2018, 7:33 a.m. UTC
---
 libavcodec/cfhd.c | 511 ++++++++++++++++++++++++++++++++++++++++------
 libavcodec/cfhd.h |  13 +-
 2 files changed, 454 insertions(+), 70 deletions(-)

Comments

Gagandeep Singh Aug. 14, 2018, 7:34 a.m. UTC | #1
On Tue, Aug 14, 2018 at 1:03 PM Gagandeep Singh <deepgagan231197@gmail.com>
wrote:

> ---
>  libavcodec/cfhd.c | 511 ++++++++++++++++++++++++++++++++++++++++------
>  libavcodec/cfhd.h |  13 +-
>  2 files changed, 454 insertions(+), 70 deletions(-)
>
> diff --git a/libavcodec/cfhd.c b/libavcodec/cfhd.c
> index 76838f0869..3906d7a86d 100644
> --- a/libavcodec/cfhd.c
> +++ b/libavcodec/cfhd.c
> @@ -41,12 +41,15 @@
>  #define ALPHA_COMPAND_GAIN 9400
>
>  enum CFHDParam {
> +    TransformType    =  10,
>      ChannelCount     =  12,
>      SubbandCount     =  14,
> +    Pframe           =  19,
>      ImageWidth       =  20,
>      ImageHeight      =  21,
>      LowpassPrecision =  35,
>      SubbandNumber    =  48,
> +    EncodingMethod   =  52,
>      Quantization     =  53,
>      ChannelNumber    =  62,
>      SampleFlags      =  68,
> @@ -64,6 +67,7 @@ static av_cold int cfhd_init(AVCodecContext *avctx)
>
>      avctx->bits_per_raw_sample = 10;
>      s->avctx                   = avctx;
> +    s->progressive             = 0;
>
>      return ff_cfhd_init_vlcs(s);
>  }
> @@ -84,6 +88,10 @@ static void init_peak_table_defaults(CFHDContext *s)
>
>  static void init_frame_defaults(CFHDContext *s)
>  {
> +    s->sample_type       = 0;
> +    s->transform_type    = 0;
> +    s->pframe            = 0;
> +    s->first_wavelet     = 0;
>      s->coded_width       = 0;
>      s->coded_height      = 0;
>      s->cropped_height    = 0;
> @@ -97,14 +105,15 @@ static void init_frame_defaults(CFHDContext *s)
>      s->pshift            = 1;
>      s->codebook          = 0;
>      s->difference_coding = 0;
> -    s->progressive       = 0;
>      init_plane_defaults(s);
>      init_peak_table_defaults(s);
>  }
>
>  /* TODO: merge with VLC tables or use LUT */
> -static inline int dequant_and_decompand(int level, int quantisation, int
> codebook)
> +static inline int dequant_and_decompand(int level, int quantisation, int
> codebook, int lossless)
>  {
> +    if (lossless)
> +        return level;
>      if (codebook == 0 || codebook == 1) {
>          int64_t abslevel = abs(level);
>          if (level < 264)
> @@ -193,16 +202,21 @@ static inline void filter(int16_t *output, ptrdiff_t
> out_stride,
>      }
>  }
>
> -static inline void interlaced_vertical_filter(int16_t *output, int16_t
> *low, int16_t *high,
> -                         int width, int linesize, int plane)
> +static inline void inverse_temporal_filter(int16_t *output, int16_t *low,
> int16_t *high,
> +                         int width, int linesize, int
> temporal_for_highpass)
>  {
>      int i;
>      int16_t even, odd;
>      for (i = 0; i < width; i++) {
>          even = (low[i] - high[i])/2;
>          odd  = (low[i] + high[i])/2;
> -        output[i]            = av_clip_uintp2(even, 10);
> -        output[i + linesize] = av_clip_uintp2(odd, 10);
> +        if (!temporal_for_highpass) {
> +            output[i]            = av_clip_uintp2(even, 10);
> +            output[i + linesize] = av_clip_uintp2(odd, 10);
> +        } else {
> +            low[i]  = even;
> +            high[i] = odd;
> +        }
>      }
>  }
>  static void horiz_filter(int16_t *output, int16_t *low, int16_t *high,
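For reference, the renamed inverse_temporal_filter() above is a plain sum/difference inverse: even = (low - high) / 2 and odd = (low + high) / 2, with the new temporal_for_highpass flag only selecting whether the results are interleaved into the output or written back into the low/high buffers. Below is a minimal round-trip sketch (not part of the patch), using a forward transform consistent with that inverse; the encoder's actual scaling may differ.

#include <stdio.h>

/* One forward transform consistent with the inverse above:
 * low = even + odd, high = odd - even, so that
 * even = (low - high) / 2 and odd = (low + high) / 2. */
static void temporal_forward(int even, int odd, int *low, int *high)
{
    *low  = even + odd;
    *high = odd - even;
}

static void temporal_inverse(int low, int high, int *even, int *odd)
{
    *even = (low - high) / 2;
    *odd  = (low + high) / 2;
}

int main(void)
{
    int low, high, even, odd;
    temporal_forward(100, 104, &low, &high);  /* a sample pair from two fields/frames */
    temporal_inverse(low, high, &even, &odd); /* reconstruct the pair */
    printf("%d %d\n", even, odd);             /* prints "100 104" */
    return 0;
}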
> @@ -231,9 +245,12 @@ static void free_buffers(CFHDContext *s)
>      for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
>          av_freep(&s->plane[i].idwt_buf);
>          av_freep(&s->plane[i].idwt_tmp);
> -
> -        for (j = 0; j < 9; j++)
> -            s->plane[i].subband[j] = NULL;
> +        if (s->transform_type == 0)
> +            for (j = 0; j < 9; j++)
> +                s->plane[i].subband[j] = NULL;
> +        else
> +            for (j = 0; j < 17; j++)
> +                s->plane[i].subband[j] = NULL;
>
>          for (j = 0; j < 8; j++)
>              s->plane[i].l_h[j] = NULL;
> @@ -247,7 +264,7 @@ static int alloc_buffers(AVCodecContext *avctx)
>      CFHDContext *s = avctx->priv_data;
>      int i, j, ret, planes;
>      int chroma_x_shift, chroma_y_shift;
> -    unsigned k;
> +    unsigned k, t;
>
>      if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height))
> < 0)
>          return ret;
> @@ -261,6 +278,7 @@ static int alloc_buffers(AVCodecContext *avctx)
>
>      for (i = 0; i < planes; i++) {
>          int w8, h8, w4, h4, w2, h2;
> +        int16_t *frame2;
>          int width  = i ? avctx->width  >> chroma_x_shift : avctx->width;
>          int height = i ? avctx->height >> chroma_y_shift : avctx->height;
>          ptrdiff_t stride = FFALIGN(width  / 8, 8) * 8;
> @@ -277,28 +295,68 @@ static int alloc_buffers(AVCodecContext *avctx)
>          w2 = w4 * 2;
>          h2 = h4 * 2;
>
> -        s->plane[i].idwt_buf =
> -            av_mallocz_array(height * stride,
> sizeof(*s->plane[i].idwt_buf));
> -        s->plane[i].idwt_tmp =
> -            av_malloc_array(height * stride,
> sizeof(*s->plane[i].idwt_tmp));
> -        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
> -            return AVERROR(ENOMEM);
> -
> -        s->plane[i].subband[0] = s->plane[i].idwt_buf;
> -        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
> -        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
> -        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
> -        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
> -        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
> -        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
> -        s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
> -        s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
> -        s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
> -
> -        for (j = 0; j < DWT_LEVELS; j++) {
> -            for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
> -                s->plane[i].band[j][k].a_width  = w8 << j;
> -                s->plane[i].band[j][k].a_height = h8 << j;
> +        if (s->transform_type == 0) {
> +            s->plane[i].idwt_buf =
> +                av_mallocz_array(height * stride,
> sizeof(*s->plane[i].idwt_buf));
> +            s->plane[i].idwt_tmp =
> +                av_malloc_array(height * stride,
> sizeof(*s->plane[i].idwt_tmp));
> +            if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
> +                return AVERROR(ENOMEM);
> +        } else if (s->transform_type == 2) {
> +            s->plane[i].idwt_buf =
> +                av_mallocz_array(2 * height * stride,
> sizeof(*s->plane[i].idwt_buf));
> +            s->plane[i].idwt_tmp =
> +                av_malloc_array(2 * height * stride,
> sizeof(*s->plane[i].idwt_tmp));
> +            if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
> +                return AVERROR(ENOMEM);
> +        }
> +
> +        if (s->transform_type == 0) {
> +            s->plane[i].subband[0] = s->plane[i].idwt_buf;
> +            s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
> +            s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
> +            s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
> +            s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
> +            s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
> +            s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
> +            s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
> +            s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
> +            s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
> +        } else if (s->transform_type == 2) {
> +            s->plane[i].subband[0]  = s->plane[i].idwt_buf;
> +            s->plane[i].subband[1]  = s->plane[i].idwt_buf + 2 * w8 * h8;
> +            s->plane[i].subband[2]  = s->plane[i].idwt_buf + 1 * w8 * h8;
> +            s->plane[i].subband[3]  = s->plane[i].idwt_buf + 3 * w8 * h8;
> +            s->plane[i].subband[4]  = s->plane[i].idwt_buf + 2 * w4 * h4;
> +            s->plane[i].subband[5]  = s->plane[i].idwt_buf + 1 * w4 * h4;
> +            s->plane[i].subband[6]  = s->plane[i].idwt_buf + 3 * w4 * h4;
> +            frame2 =
> +            s->plane[i].subband[7]  = s->plane[i].idwt_buf + 4 * w2 * h2;
> +            s->plane[i].subband[8]  = frame2 + 2 * w4 * h4;
> +            s->plane[i].subband[9]  = frame2 + 1 * w4 * h4;
> +            s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
> +            s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
> +            s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
> +            s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
> +            s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
> +            s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
> +            s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
> +        }
> +
> +        if (s->transform_type == 0) {
> +            for (j = 0; j < DWT_LEVELS - 3; j++) {
> +                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++)
> {
> +                    s->plane[i].band[j][k].a_width  = w8 << j;
> +                    s->plane[i].band[j][k].a_height = h8 << j;
> +                }
> +            }
> +        } else if (s->transform_type == 2) {
> +            for (j = 0; j < DWT_LEVELS; j++) {
> +                t = j < 1 ? 0 : (j < 3 ? 1 : 2);
> +                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[0]); k++)
> {
> +                    s->plane[i].band[j][k].a_width  = w8 << t;
> +                    s->plane[i].band[j][k].a_height = h8 << t;
> +                }
>              }
>          }
>
> @@ -311,6 +369,11 @@ static int alloc_buffers(AVCodecContext *avctx)
>          // s->plane[i].l_h[5] = ll1;
>          s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
>          s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
> +        if (s->transform_type == 2) {
> +            frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;
> +            s->plane[i].l_h[8] = frame2;
> +            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
> +            }
>      }
>
>      s->a_height = s->coded_height;
> @@ -349,6 +412,9 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>          } else if (tag == SampleFlags) {
>              av_log(avctx, AV_LOG_DEBUG, "Progressive?%"PRIu16"\n", data);
>              s->progressive = data & 0x0001;
> +        } else if (tag == Pframe) {
> +            s->pframe = 1;
> +            av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
>          } else if (tag == ImageWidth) {
>              av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
>              s->coded_width = data;
> @@ -373,7 +439,7 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              }
>          } else if (tag == SubbandCount) {
>              av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n",
> data);
> -            if (data != SUBBAND_COUNT) {
> +            if (data != 10 && data != 17) {
>                  av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16"
> is unsupported\n", data);
>                  ret = AVERROR_PATCHWELCOME;
>                  break;
> @@ -405,7 +471,7 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>          } else if (tag == 51) {
>              av_log(avctx, AV_LOG_DEBUG, "Subband number actual
> %"PRIu16"\n", data);
>              s->subband_num_actual = data;
> -            if (s->subband_num_actual >= 10) {
> +            if (s->subband_num_actual >= 17 && s->subband_num_actual !=
> 255) {
>                  av_log(avctx, AV_LOG_ERROR, "Invalid subband number
> actual\n");
>                  ret = AVERROR(EINVAL);
>                  break;
> @@ -420,9 +486,15 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              s->prescale_shift[1] = (data >> 3) & 0x7;
>              s->prescale_shift[2] = (data >> 6) & 0x7;
>              av_log(avctx, AV_LOG_DEBUG, "Prescale shift (VC-5): %x\n",
> data);
> +        } else if (tag == EncodingMethod) {
> +            s->encode_method = data;
> +            av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d :
> %x\n",s->subband_num_actual, data);
>          } else if (tag == 27) {
>              av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n",
> data);
> -            if (data < 3 || data >
> s->plane[s->channel_num].band[0][0].a_width) {
> +            if (s->coded_width == 0) {
> +                s->coded_width = data << 3;
> +            }
> +            if (data < 3) {
>                  av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
>                  ret = AVERROR(EINVAL);
>                  break;
> @@ -431,20 +503,21 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              s->plane[s->channel_num].band[0][0].stride = data;
>          } else if (tag == 28) {
>              av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n",
> data);
> -            if (data < 3 || data >
> s->plane[s->channel_num].band[0][0].a_height) {
> +            if (s->coded_height == 0)
> +                s->coded_height = data << 3;
> +            if (data < 3) {
>                  av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
>                  ret = AVERROR(EINVAL);
>                  break;
>              }
>              s->plane[s->channel_num].band[0][0].height = data;
> -        } else if (tag == 1)
> +        } else if (tag == 1) {
> +            s->sample_type = data;
> +            if (data == 2)
> +                s->pframe = 1;
>              av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
> -        else if (tag == 10) {
> -            if (data != 0) {
> -                avpriv_report_missing_feature(avctx, "Transform type of
> %"PRIu16, data);
> -                ret = AVERROR_PATCHWELCOME;
> -                break;
> -            }
> +        } else if (tag == 10) {
> +            s->transform_type = data;
>              av_log(avctx, AV_LOG_DEBUG, "Transform-type? %"PRIu16"\n",
> data);
>          } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
>              if (abstag == 0x4001)
> @@ -551,8 +624,8 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              av_log(avctx, AV_LOG_DEBUG,  "Unknown tag %i data %x\n", tag,
> data);
>
>          /* Some kind of end of header tag */
> -        if (tag == 4 && data == 0x1a4a && s->coded_width &&
> s->coded_height &&
> -            s->coded_format != AV_PIX_FMT_NONE) {
> +        if (((tag == 4 && (data == 0x1a4a || data == 0xf0f)) ||
> s->sample_type == 1) && s->coded_width && s->coded_height &&
> +            s->coded_format != AV_PIX_FMT_NONE && s->sample_type != 3 &&
> s->sample_type != 6) {
>              if (s->a_width != s->coded_width || s->a_height !=
> s->coded_height ||
>                  s->a_format != s->coded_format) {
>                  free_buffers(s);
> @@ -619,8 +692,7 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>
>              av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n",
> lowpass_width * lowpass_height);
>          }
> -
> -        if (tag == 55 && s->subband_num_actual != 255 && s->a_width &&
> s->a_height) {
> +        if ((tag == 55 || tag == 82) && s->a_width && s->a_height) {
>              int highpass_height =
> s->plane[s->channel_num].band[s->level][s->subband_num].height;
>              int highpass_width  =
> s->plane[s->channel_num].band[s->level][s->subband_num].width;
>              int highpass_a_width =
> s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
> @@ -638,18 +710,24 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              }
>
>              if (highpass_height > highpass_a_height || highpass_width >
> highpass_a_width || a_expected < highpass_height *
> (uint64_t)highpass_stride) {
> -                av_log(avctx, AV_LOG_ERROR, "Too many highpass
> coefficients\n");
> -                ret = AVERROR(EINVAL);
> -                goto end;
> +                  if (s->subband_num_actual != 255) {
> +                      av_log(avctx, AV_LOG_ERROR, "Too many highpass
> coefficients\n");
> +                      ret = AVERROR(EINVAL);
> +                      goto end;
> +                  }
>              }
>              expected = highpass_height * highpass_stride;
>
>              av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i
> level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook,
> expected);
>
>              init_get_bits(&s->gb, gb.buffer,
> bytestream2_get_bytes_left(&gb) * 8);
> +            if (s->subband_num_actual == 255) {
> +                expected = 0;
> +                goto finish;
> +            }
>              {
>                  OPEN_READER(re, &s->gb);
> -                if (!s->codebook) {
> +                if (!s->codebook && !(s->transform_type == 2 &&
> s->subband_num_actual == 7)) {
>                      while (1) {
>                          UPDATE_CACHE(re, &s->gb);
>                          GET_RL_VLC(level, run, re, &s->gb,
> s->table_9_rl_vlc,
> @@ -664,9 +742,14 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                          if (count > expected)
>                              break;
>
> -                        coeff = dequant_and_decompand(level,
> s->quantisation, 0);
> +                        coeff = dequant_and_decompand(level,
> s->quantisation, 0, (s->sample_type == 2 || s->sample_type == 3) &&
> s->pframe && s->subband_num_actual == 7 && s->encode_method == 5);
>                          for (i = 0; i < run; i++)
> -                            *coeff_data++ = coeff;
> +                            if (tag != 82)
> +                                *coeff_data++  = coeff;
> +                            else {
> +                                *coeff_data   |= coeff << 8;
> +                                *coeff_data++ *= s->quantisation;
> +                            }
>                      }
>                      if (s->peak.level)
>                          peak_table(coeff_data - expected, &s->peak,
> expected);
> @@ -688,9 +771,14 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                          if (count > expected)
>                              break;
>
> -                        coeff = dequant_and_decompand(level,
> s->quantisation, s->codebook);
> +                        coeff = dequant_and_decompand(level,
> s->quantisation, s->codebook, (s->sample_type == 2 || s->sample_type == 3)
> && s->pframe && s->subband_num_actual == 7 && s->encode_method == 5);
>                          for (i = 0; i < run; i++)
> -                            *coeff_data++ = coeff;
> +                            if (tag != 82)
> +                                *coeff_data++  = coeff;
> +                            else {
> +                                *coeff_data   |= coeff << 8;
> +                                *coeff_data++ *= s->quantisation;
> +                            }
>                      }
>                      if (s->peak.level)
>                          peak_table(coeff_data - expected, &s->peak,
> expected);
> @@ -701,12 +789,12 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                  CLOSE_READER(re, &s->gb);
>              }
>
> -            if (count > expected) {
> +            if (count > expected && s->subband_num_actual != 255) {
>                  av_log(avctx, AV_LOG_ERROR, "Escape codeword not found,
> probably corrupt data\n");
>                  ret = AVERROR(EINVAL);
>                  goto end;
>              }
> -
> +            finish:
>              bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
>              if (bytes > bytestream2_get_bytes_left(&gb)) {
>                  av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
> @@ -726,14 +814,15 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              }
>          }
>      }
> -
> -    if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
> -        s->coded_width || s->coded_height || s->coded_format !=
> AV_PIX_FMT_NONE) {
> +    //disabled to run mountain sample file
> +#if 0
> +    if ((!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
> +        s->coded_width || s->coded_height || s->coded_format !=
> AV_PIX_FMT_NONE) && s->sample_type != 1) {
>          av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
>          ret = AVERROR(EINVAL);
>          goto end;
>      }
> -
> +#endif
>      if (!got_buffer) {
>          av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
>          ret = AVERROR(EINVAL);
> @@ -741,8 +830,9 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>      }
>
>      planes = av_pix_fmt_count_planes(avctx->pix_fmt);
> +    if (s->transform_type == 0 && s->sample_type != 1) {
>      for (plane = 0; plane < planes && !ret; plane++) {
> -        /* level 1 */
> +            /* level 1 */
>          int lowpass_height  = s->plane[plane].band[0][0].height;
>          int lowpass_width   = s->plane[plane].band[0][0].width;
>          int highpass_stride = s->plane[plane].band[0][1].stride;
> @@ -921,14 +1011,305 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              low  = s->plane[plane].l_h[6];
>              high = s->plane[plane].l_h[7];
>              for (i = 0; i < lowpass_height; i++) {
> -                interlaced_vertical_filter(dst, low, high, lowpass_width
> * 2,  pic->linesize[act_plane]/2, act_plane);
> +                inverse_temporal_filter(dst, low, high, lowpass_width *
> 2,  pic->linesize[act_plane]/2, 0);
>                  low  += lowpass_width * 2;
>                  high += lowpass_width * 2;
>                  dst  += pic->linesize[act_plane];
>              }
>          }
>      }
> +    //this is the serial version of IP sample decoding, so the buffers allocated by alloc_buffers() are not freed;
> +    //the stored decoded coefficient data is used to generate the second frame once an empty packet with sample_type == 1 is passed
> +    } else if (s->transform_type == 2 && s->sample_type != 1) {
> +        for (plane = 0; plane < planes && !ret; plane++) {
> +            int lowpass_height  = s->plane[plane].band[0][0].height;
> +            int lowpass_width   = s->plane[plane].band[0][0].width;
> +            int highpass_stride = s->plane[plane].band[0][1].stride;
> +            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
> +            int16_t *low, *high, *output, *dst;
> +
> +            if (lowpass_height > s->plane[plane].band[0][0].a_height ||
> lowpass_width > s->plane[plane].band[0][0].a_width ||
> +                !highpass_stride || s->plane[plane].band[0][1].width >
> s->plane[plane].band[0][1].a_width) {
> +                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
> +                ret = AVERROR(EINVAL);
> +                goto end;
> +            }
> +
> +            av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i
> %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
> +
> +            low    = s->plane[plane].subband[0];
> +            high   = s->plane[plane].subband[2];
> +            output = s->plane[plane].l_h[0];
> +            for (i = 0; i < lowpass_width; i++) {
> +                vert_filter(output, lowpass_width, low, lowpass_width,
> high, highpass_stride, lowpass_height);
> +                low++;
> +                high++;
> +                output++;
> +            }
> +
> +            low    = s->plane[plane].subband[1];
> +            high   = s->plane[plane].subband[3];
> +            output = s->plane[plane].l_h[1];
> +            for (i = 0; i < lowpass_width; i++) {
> +                vert_filter(output, lowpass_width, low, highpass_stride,
> high, highpass_stride, lowpass_height);
> +                low++;
> +                high++;
> +                output++;
> +            }
> +
> +            low    = s->plane[plane].l_h[0];
> +            high   = s->plane[plane].l_h[1];
> +            output = s->plane[plane].subband[0];
> +            for (i = 0; i < lowpass_height * 2; i++) {
> +                horiz_filter(output, low, high, lowpass_width);
> +                low    += lowpass_width;
> +                high   += lowpass_width;
> +                output += lowpass_width * 2;
> +            }
> +            if (s->bpc == 12) {
> +                output = s->plane[plane].subband[0];
> +                for (i = 0; i < lowpass_height * 2; i++) {
> +                    for (j = 0; j < lowpass_width * 2; j++)
> +                        output[j] *= 4;
> +
> +                    output += lowpass_width * 2;
> +                }
> +            }
> +
> +            lowpass_height  = s->plane[plane].band[1][1].height;
> +            lowpass_width   = s->plane[plane].band[1][1].width;
> +            highpass_stride = s->plane[plane].band[1][1].stride;
> +
> +            if (lowpass_height > s->plane[plane].band[1][1].a_height ||
> lowpass_width > s->plane[plane].band[1][1].a_width ||
> +                !highpass_stride || s->plane[plane].band[1][1].width >
> s->plane[plane].band[1][1].a_width) {
> +                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
> +                ret = AVERROR(EINVAL);
> +                goto end;
> +            }
> +
> +            av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i
> %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
> +
> +            low    = s->plane[plane].subband[0];
> +            high   = s->plane[plane].subband[5];
> +            output = s->plane[plane].l_h[3];
> +            for (i = 0; i < lowpass_width; i++) {
> +                vert_filter(output, lowpass_width, low, lowpass_width,
> high, highpass_stride, lowpass_height);
> +                low++;
> +                high++;
> +                output++;
> +            }
>
> +            low    = s->plane[plane].subband[4];
> +            high   = s->plane[plane].subband[6];
> +            output = s->plane[plane].l_h[4];
> +            for (i = 0; i < lowpass_width; i++) {
> +                vert_filter(output, lowpass_width, low, highpass_stride,
> high, highpass_stride, lowpass_height);
> +                low++;
> +                high++;
> +                output++;
> +            }
> +
> +            low    = s->plane[plane].l_h[3];
> +            high   = s->plane[plane].l_h[4];
> +            output = s->plane[plane].subband[0];
> +            for (i = 0; i < lowpass_height * 2; i++) {
> +                horiz_filter(output, low, high, lowpass_width);
> +                low    += lowpass_width;
> +                high   += lowpass_width;
> +                output += lowpass_width * 2;
> +            }
> +
> +            output = s->plane[plane].subband[0];
> +            for (i = 0; i < lowpass_height * 2; i++) {
> +                for (j = 0; j < lowpass_width * 2; j++)
> +                    output[j] *= 4;
> +                output += lowpass_width * 2;
> +            }
> +
> +            low    = s->plane[plane].subband[7];
> +            high   = s->plane[plane].subband[9];
> +            output = s->plane[plane].l_h[3];
> +            for (i = 0; i < lowpass_width; i++) {
> +                vert_filter(output, lowpass_width, low, lowpass_width,
> high, highpass_stride, lowpass_height);
> +                low++;
> +                high++;
> +                output++;
> +            }
> +
> +            low    = s->plane[plane].subband[8];
> +            high   = s->plane[plane].subband[10];
> +            output = s->plane[plane].l_h[4];
> +            for (i = 0; i < lowpass_width; i++) {
> +                vert_filter(output, lowpass_width, low, highpass_stride,
> high, highpass_stride, lowpass_height);
> +                low++;
> +                high++;
> +                output++;
> +            }
> +
> +            low    = s->plane[plane].l_h[3];
> +            high   = s->plane[plane].l_h[4];
> +            output = s->plane[plane].subband[7];
> +            for (i = 0; i < lowpass_height * 2; i++) {
> +                horiz_filter(output, low, high, lowpass_width);
> +                low    += lowpass_width;
> +                high   += lowpass_width;
> +                output += lowpass_width * 2;
> +            }
> +
> +            lowpass_height  = s->plane[plane].band[4][1].height;
> +            lowpass_width   = s->plane[plane].band[4][1].width;
> +            highpass_stride = s->plane[plane].band[4][1].stride;
> +            av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n",
> plane, lowpass_height, lowpass_width, highpass_stride);
> +
> +            if (lowpass_height > s->plane[plane].band[4][1].a_height ||
> lowpass_width > s->plane[plane].band[4][1].a_width ||
> +                !highpass_stride || s->plane[plane].band[4][1].width >
> s->plane[plane].band[4][1].a_width) {
> +                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
> +                ret = AVERROR(EINVAL);
> +                goto end;
> +            }
> +
> +            low    = s->plane[plane].subband[0];
> +            high   = s->plane[plane].subband[7];
> +            output = s->plane[plane].subband[0];
> +            for (i = 0; i < lowpass_height; i++) {
> +                inverse_temporal_filter(output, low, high, lowpass_width,
> 4 * lowpass_width * lowpass_height, 1);
> +                low    += lowpass_width;
> +                high   += lowpass_width;
> +            }
> +            if (s->progressive) {
> +                low    = s->plane[plane].subband[0];
> +                high   = s->plane[plane].subband[15];
> +                output = s->plane[plane].l_h[6];
> +                for (i = 0; i < lowpass_width; i++) {
> +                    vert_filter(output, lowpass_width, low,
> lowpass_width, high, highpass_stride, lowpass_height);
> +                    low++;
> +                    high++;
> +                    output++;
> +                }
> +
> +                low    = s->plane[plane].subband[14];
> +                high   = s->plane[plane].subband[16];
> +                output = s->plane[plane].l_h[7];
> +                for (i = 0; i < lowpass_width; i++) {
> +                    vert_filter(output, lowpass_width, low,
> highpass_stride, high, highpass_stride, lowpass_height);
> +                    low++;
> +                    high++;
> +                    output++;
> +                }
> +
> +                low    = s->plane[plane].subband[7];
> +                high   = s->plane[plane].subband[12];
> +                output = s->plane[plane].l_h[8];
> +                for (i = 0; i < lowpass_width; i++) {
> +                    vert_filter(output, lowpass_width, low,
> lowpass_width, high, highpass_stride, lowpass_height);
> +                    low++;
> +                    high++;
> +                    output++;
> +                }
> +
> +                low    = s->plane[plane].subband[11];
> +                high   = s->plane[plane].subband[13];
> +                output = s->plane[plane].l_h[9];
> +                for (i = 0; i < lowpass_width; i++) {
> +                    vert_filter(output, lowpass_width, low,
> highpass_stride, high, highpass_stride, lowpass_height);
> +                    low++;
> +                    high++;
> +                    output++;
> +                }
> +
> +                dst = (int16_t *)pic->data[act_plane];
> +                low  = s->plane[plane].l_h[6];
> +                high = s->plane[plane].l_h[7];
> +                for (i = 0; i < lowpass_height * 2; i++) {
> +                    horiz_filter(dst, low, high, lowpass_width);
> +                    low  += lowpass_width;
> +                    high += lowpass_width;
> +                    dst  += pic->linesize[act_plane] / 2;
> +                }
> +            } else {
> +                pic->interlaced_frame = 1;
> +                low    = s->plane[plane].subband[0];
> +                high   = s->plane[plane].subband[14];
> +                output = s->plane[plane].l_h[6];
> +                for (i = 0; i < lowpass_height; i++) {
> +                    horiz_filter(output, low, high, lowpass_width);
> +                    low    += lowpass_width;
> +                    high   += lowpass_width;
> +                    output += lowpass_width * 2;
> +                }
> +
> +                low    = s->plane[plane].subband[15];
> +                high   = s->plane[plane].subband[16];
> +                output = s->plane[plane].l_h[7];
> +                for (i = 0; i < lowpass_height; i++) {
> +                    horiz_filter(output, low, high, lowpass_width);
> +                    low    += lowpass_width;
> +                    high   += lowpass_width;
> +                    output += lowpass_width * 2;
> +                }
> +
> +                low    = s->plane[plane].subband[7];
> +                high   = s->plane[plane].subband[11];
> +                output = s->plane[plane].l_h[8];
> +                for (i = 0; i < lowpass_height; i++) {
> +                    horiz_filter(output, low, high, lowpass_width);
> +                    low    += lowpass_width;
> +                    high   += lowpass_width;
> +                    output += lowpass_width * 2;
> +                }
> +
> +                low    = s->plane[plane].subband[12];
> +                high   = s->plane[plane].subband[13];
> +                output = s->plane[plane].l_h[9];
> +                for (i = 0; i < lowpass_height; i++) {
> +                    horiz_filter(output, low, high, lowpass_width);
> +                    low    += lowpass_width;
> +                    high   += lowpass_width;
> +                    output += lowpass_width * 2;
> +                }
> +
> +
> +                dst  = (int16_t *)pic->data[act_plane];
> +                low  = s->plane[plane].l_h[6];
> +                high = s->plane[plane].l_h[7];
> +                for (i = 0; i < lowpass_height; i++) {
> +                    inverse_temporal_filter(dst, low, high, lowpass_width
> * 2,  pic->linesize[act_plane]/2, 0);
> +                    low  += lowpass_width * 2;
> +                    high += lowpass_width * 2;
> +                    dst  += pic->linesize[act_plane];
> +                }
> +            }
> +        }
> +    } else if (s->sample_type == 1) {
> +        int16_t *low, *high, *dst;
> +        int lowpass_height, lowpass_width;
> +        for (plane = 0; plane < planes && !ret; plane++) {
> +            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
> +            lowpass_height  = s->plane[plane].band[4][1].height;
> +            lowpass_width   = s->plane[plane].band[4][1].width;
> +            if (s->progressive) {
> +                dst = (int16_t *)pic->data[act_plane];
> +                low  = s->plane[plane].l_h[8];
> +                high = s->plane[plane].l_h[9];
> +                for (i = 0; i < lowpass_height * 2; i++) {
> +                    horiz_filter(dst, low, high, lowpass_width);
> +                    low  += lowpass_width;
> +                    high += lowpass_width;
> +                    dst  += pic->linesize[act_plane] / 2;
> +                }
> +            } else {
> +                dst  = (int16_t *)pic->data[act_plane];
> +                low  = s->plane[plane].l_h[8];
> +                high = s->plane[plane].l_h[9];
> +                for (i = 0; i < lowpass_height; i++) {
> +                    inverse_temporal_filter(dst, low, high, lowpass_width
> * 2,  pic->linesize[act_plane]/2, 0);
> +                    low  += lowpass_width * 2;
> +                    high += lowpass_width * 2;
> +                    dst  += pic->linesize[act_plane];
> +                }
> +            }
> +        }
> +    }
>
>  end:
>      if (ret < 0)
> @@ -961,6 +1342,6 @@ AVCodec ff_cfhd_decoder = {
>      .init             = cfhd_init,
>      .close            = cfhd_close,
>      .decode           = cfhd_decode,
> -    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
> -    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE |
> FF_CODEC_CAP_INIT_CLEANUP,
> +    .capabilities     = AV_CODEC_CAP_DR1,
> +    .caps_internal    = FF_CODEC_CAP_INIT_CLEANUP,
>  };
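To make the large transform_type == 2 branch above easier to navigate, the sketch below restates the per-plane reconstruction order it implements. The stage functions are placeholders invented for this summary and do not exist in the patch; only the ordering and the subband groups named in the comments are taken from the code.

#include <stdio.h>

static void stage(const char *what) { printf("%s\n", what); }

/* Approximate reading of the transform_type == 2 path in cfhd_decode(). */
static void decode_plane_3d(int progressive)
{
    stage("level 1 spatial inverse: subbands 0-3             -> subband[0]");
    stage("level 2 spatial inverse: subband 0 + subbands 4-6 -> subband[0]");
    stage("level 2 spatial inverse: subbands 7-10            -> subband[7]");
    stage("temporal inverse between subband[0] and subband[7]");
    if (progressive)
        stage("final vertical+horizontal pass: subbands 11-16 -> picture 1; "
              "picture 2 intermediates are left in l_h[8]/l_h[9]");
    else
        stage("final horizontal pass + field interleave -> picture 1; "
              "picture 2 intermediates are left in l_h[8]/l_h[9]");
}

int main(void)
{
    decode_plane_3d(1);
    return 0;
}

The second picture of the pair is then produced from the kept l_h[8]/l_h[9] buffers when the empty sample_type == 1 packet arrives, as the comment inside the hunk explains.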
> diff --git a/libavcodec/cfhd.h b/libavcodec/cfhd.h
> index 4f2c82d8bc..047c0f2028 100644
> --- a/libavcodec/cfhd.h
> +++ b/libavcodec/cfhd.h
> @@ -31,15 +31,14 @@
>  #include "vlc.h"
>
>  #define VLC_BITS       9
> -#define SUBBAND_COUNT 10
> -
> +#define SUBBAND_COUNT 17
>  typedef struct CFHD_RL_VLC_ELEM {
>      int16_t level;
>      int8_t len;
>      uint16_t run;
>  } CFHD_RL_VLC_ELEM;
>
> -#define DWT_LEVELS 3
> +#define DWT_LEVELS 6
>
>  typedef struct SubBand {
>      int level;
> @@ -64,7 +63,7 @@ typedef struct Plane {
>
>      /* TODO: merge this into SubBand structure */
>      int16_t *subband[SUBBAND_COUNT];
> -    int16_t *l_h[8];
> +    int16_t *l_h[10];
>
>      SubBand band[DWT_LEVELS][4];
>  } Plane;
> @@ -83,9 +82,13 @@ typedef struct CFHDContext {
>
>      CFHD_RL_VLC_ELEM table_18_rl_vlc[4572];
>      VLC vlc_18;
> -
>      GetBitContext gb;
>
> +    int sample_type;
> +    int transform_type;
> +    int encode_method;
> +    int first_wavelet;
> +    int pframe;
>      int coded_width;
>      int coded_height;
>      int cropped_height;
> --
> 2.17.1
>
>
> From d6ac8a4ee2f08a6d3cab7e7500d740c661062c9f Mon Sep 17 00:00:00 2001
> From: Gagandeep Singh <deepgagan231197@gmail.com>
> Date: Tue, 14 Aug 2018 00:20:06 +0530
> Subject: [GSOC][FFmpeg-devel][PATCH 2/3] lavc/cfhd: corrected decompanding for table 9 used in cfhd
>
> ---
>  libavcodec/cfhd.c | 25 ++++++++++++++++++++++++-
>  1 file changed, 24 insertions(+), 1 deletion(-)
>
> diff --git a/libavcodec/cfhd.c b/libavcodec/cfhd.c
> index 3906d7a86d..2c538f0bbd 100644
> --- a/libavcodec/cfhd.c
> +++ b/libavcodec/cfhd.c
> @@ -114,7 +114,30 @@ static inline int dequant_and_decompand(int level,
> int quantisation, int codeboo
>  {
>      if (lossless)
>          return level;
> -    if (codebook == 0 || codebook == 1) {
> +    if (codebook == 0) {
> +        if (level >= 40 && level < 264) {
> +            if (level >= 54) {
> +                level  -= 54;
> +                level <<= 2;
> +                level  += 54;
> +            }
> +            level  -= 40;
> +            level <<= 2;
> +            level  += 40;
> +        } else if (level <= -40) {
> +            level = -level;
> +            if (level >= 54) {
> +                level  -= 54;
> +                level <<= 2;
> +                level  += 54;
> +            }
> +            level  -= 40;
> +            level <<= 2;
> +            level  += 40;
> +            level   = -level;
> +        }
> +        return level * quantisation;
> +    } else if (codebook == 1) {
>          int64_t abslevel = abs(level);
>          if (level < 264)
>              return (abslevel + ((768 * abslevel * abslevel * abslevel) /
> (255 * 255 * 255))) *
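As a readability aid, here is a self-contained restatement of the new codebook-0 expansion with a few hand-computed values; it is intended to behave exactly like the code above, with the final multiplication by the quantisation value left out.

#include <stdio.h>

static int decompand_cb0(int level)
{
    if (level >= 40 && level < 264) {
        if (level >= 54)
            level = ((level - 54) << 2) + 54;
        level = ((level - 40) << 2) + 40;
    } else if (level <= -40) {
        int mag = -level;
        if (mag >= 54)
            mag = ((mag - 54) << 2) + 54;
        mag = ((mag - 40) << 2) + 40;
        level = -mag;
    }
    return level; /* the decoder then multiplies by the quantisation value */
}

int main(void)
{
    int v[] = { 39, 40, 53, 54, 60, -60 };
    for (int i = 0; i < 6; i++) /* prints 39, 40, 92, 96, 192, -192 */
        printf("%d -> %d\n", v[i], decompand_cb0(v[i]));
    return 0;
}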
> --
> 2.17.1
>
>
> From fa23549c61a6d8413cdc79c570376c53795a6ff1 Mon Sep 17 00:00:00 2001
> From: Gagandeep Singh <deepgagan231197@gmail.com>
> Date: Tue, 14 Aug 2018 12:43:20 +0530
> Subject: [GSOC][FFmpeg-devel][PATCH 3/3] lavc/cfhd: frame threading support for 3D transform progressive and interlaced samples
>
> ---
>  libavcodec/cfhd.c | 378 ++++++++++++++++++++++++++++------------------
>  libavcodec/cfhd.h |   8 +-
>  2 files changed, 242 insertions(+), 144 deletions(-)
>
> diff --git a/libavcodec/cfhd.c b/libavcodec/cfhd.c
> index 2c538f0bbd..7c298056ca 100644
> --- a/libavcodec/cfhd.c
> +++ b/libavcodec/cfhd.c
> @@ -63,13 +63,23 @@ enum CFHDParam {
>
>  static av_cold int cfhd_init(AVCodecContext *avctx)
>  {
> +    int ret;
> +
>      CFHDContext *s = avctx->priv_data;
> +    if (!avctx->internal->is_copy) {
> +        avctx->internal->allocate_progress = 1;
> +        ret = ff_cfhd_init_vlcs(s);
> +    } else
> +        ret = 0;
>
>      avctx->bits_per_raw_sample = 10;
>      s->avctx                   = avctx;
>      s->progressive             = 0;
> +    s->i_frame.f = av_frame_alloc();
> +    s->p_frame.f = av_frame_alloc();
>
> -    return ff_cfhd_init_vlcs(s);
> +
> +    return ret;
>  }
>
>  static void init_plane_defaults(CFHDContext *s)
> @@ -268,15 +278,18 @@ static void free_buffers(CFHDContext *s)
>      for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
>          av_freep(&s->plane[i].idwt_buf);
>          av_freep(&s->plane[i].idwt_tmp);
> -        if (s->transform_type == 0)
> +        if (s->transform_type == 0) {
>              for (j = 0; j < 9; j++)
>                  s->plane[i].subband[j] = NULL;
> -        else
> +            for (j = 0; j < 8; j++)
> +                s->plane[i].l_h[j] = NULL;
> +        }
> +        else {
>              for (j = 0; j < 17; j++)
>                  s->plane[i].subband[j] = NULL;
> -
> -        for (j = 0; j < 8; j++)
> -            s->plane[i].l_h[j] = NULL;
> +            for (j = 0; j < 12; j++)
> +                s->plane[i].l_h[j] = NULL;
> +        }
>      }
>      s->a_height = 0;
>      s->a_width  = 0;
> @@ -394,8 +407,10 @@ static int alloc_buffers(AVCodecContext *avctx)
>          s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
>          if (s->transform_type == 2) {
>              frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;
> -            s->plane[i].l_h[8] = frame2;
> -            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
> +            s->plane[i].l_h[8]  = frame2;
> +            s->plane[i].l_h[9]  = frame2 + 2 * w4 * h4;
> +            s->plane[i].l_h[10] = frame2;
> +            s->plane[i].l_h[11] = frame2 + 2 * w2 * h2;
>              }
>      }
>
> @@ -406,14 +421,28 @@ static int alloc_buffers(AVCodecContext *avctx)
>      return 0;
>  }
>
> +static int update_thread_context(AVCodecContext *dst, const
> AVCodecContext *src)
> +{
> +    CFHDContext *csrc = src->priv_data;
> +    CFHDContext *cdst = dst->priv_data;
> +    cdst->transform_type = csrc->transform_type;
> +    if (csrc->sample_type != 1 && csrc->transform_type != 0) {
> +        cdst->progressive = csrc->progressive;
> +        cdst->picture = &csrc->p_frame;
> +        cdst->connection = &csrc->i_frame;
> +        cdst->buffers = csrc->plane;
> +    }
> +
> +    return 0;
> +}
> +
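For readers less familiar with FFmpeg frame threading: update_thread_context() above hands the thread decoding the second ("P") sample references to the first ("I") thread's frame and coefficient buffers, and the ff_thread_report_progress() / ff_thread_await_progress() calls added later in this patch let the P thread wait until the shared subbands have been decoded. The sketch below models that handshake with plain pthreads; it only illustrates the pattern and is not FFmpeg's ThreadFrame API.

#include <pthread.h>
#include <stdio.h>

/* Toy model of the report/await progress handshake (not FFmpeg code). */
typedef struct {
    pthread_mutex_t lock;
    pthread_cond_t  cond;
    int             progress;
} Progress;

static Progress shared = { PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, 0 };

static void report_progress(Progress *p, int value)
{
    pthread_mutex_lock(&p->lock);
    if (value > p->progress)
        p->progress = value;
    pthread_cond_broadcast(&p->cond);
    pthread_mutex_unlock(&p->lock);
}

static void await_progress(Progress *p, int value)
{
    pthread_mutex_lock(&p->lock);
    while (p->progress < value)
        pthread_cond_wait(&p->cond, &p->lock);
    pthread_mutex_unlock(&p->lock);
}

static void *producer(void *arg)   /* like the thread decoding the I sample */
{
    (void)arg;
    for (int plane = 0; plane < 3; plane++)
        report_progress(&shared, plane + 1);  /* cf. ff_thread_report_progress() */
    return NULL;
}

static void *consumer(void *arg)   /* like the thread producing the P picture */
{
    (void)arg;
    for (int plane = 0; plane < 3; plane++) {
        await_progress(&shared, plane + 1);   /* cf. ff_thread_await_progress() */
        printf("plane %d ready, temporal inverse can run\n", plane);
    }
    return NULL;
}

int main(void)
{
    pthread_t a, b;
    pthread_create(&a, NULL, producer, NULL);
    pthread_create(&b, NULL, consumer, NULL);
    pthread_join(a, NULL);
    pthread_join(b, NULL);
    return 0;
}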
>  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
>                         AVPacket *avpkt)
>  {
>      CFHDContext *s = avctx->priv_data;
>      GetByteContext gb;
>      ThreadFrame frame = { .f = data };
> -    AVFrame *pic = data;
> -    int ret = 0, i, j, planes, plane, got_buffer = 0;
> +    int ret = 0, i, j, planes, plane, got_buffer = 0, progress1 = 1,
> progress2 = 1;
>      int16_t *coeff_data;
>
>      s->coded_format = AV_PIX_FMT_YUV422P10;
> @@ -537,7 +566,9 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>          } else if (tag == 1) {
>              s->sample_type = data;
>              if (data == 2)
> -                s->pframe = 1;
> +                s->pframe  = 1;
> +            else if (data == 1)
> +                s->transform_type = 2;
>              av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
>          } else if (tag == 10) {
>              s->transform_type = data;
> @@ -657,21 +688,54 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                      return ret;
>                  }
>              }
> -            ret = ff_set_dimensions(avctx, s->coded_width,
> s->coded_height);
> -            if (ret < 0)
> -                return ret;
> -            if (s->cropped_height)
> -                avctx->height = s->cropped_height;
> -            frame.f->width =
> -            frame.f->height = 0;
> -
> -            if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
> -                return ret;
> -
> +            if (s->transform_type == 2) {
> +                if (s->sample_type != 1) {
> +                    s->picture = &s->i_frame;
> +                    s->connection = &s->p_frame;
> +                    s->buffers = s->plane;
> +                }
> +                ret = ff_set_dimensions(avctx, s->coded_width,
> s->coded_height);
> +                if (ret < 0)
> +                    return ret;
> +                if (s->sample_type != 1) {
> +                    if (s->i_frame.f->data[0])
> +                        ff_thread_release_buffer(avctx, &s->i_frame);
> +                    if (s->p_frame.f->data[0])
> +                        ff_thread_release_buffer(avctx, &s->p_frame);
> +                    av_frame_copy_props(s->i_frame.f, frame.f);
> +                    av_frame_copy_props(s->p_frame.f, frame.f);
> +                    if (s->cropped_height)
> +                        avctx->height = s->cropped_height;
> +                    s->picture->f->width =
> +                    s->picture->f->height = 0;
> +                    s->connection->f->width =
> +                    s->connection->f->height = 0;
> +                    if ((ret = ff_thread_get_buffer(avctx, s->picture,
> 0)) < 0)
> +                        return ret;
> +                    if ((ret = ff_thread_get_buffer(avctx, s->connection,
> 0)) < 0)
> +                        return ret;
> +                }
> +            } else {
> +                s->picture = &s->i_frame;
> +                s->buffers = s->plane;
> +                if (s->picture->f->data[0])
> +                    ff_thread_release_buffer(avctx, s->picture);
> +                av_frame_copy_props(s->i_frame.f, frame.f);
> +                ret = ff_set_dimensions(avctx, s->coded_width,
> s->coded_height);
> +                if (ret < 0)
> +                    return ret;
> +                if (s->cropped_height)
> +                    avctx->height = s->cropped_height;
> +                s->picture->f->width =
> +                s->picture->f->height = 0;
> +                if ((ret = ff_thread_get_buffer(avctx, s->picture, 0)) <
> 0)
> +                    return ret;
> +            }
>              s->coded_width = 0;
>              s->coded_height = 0;
>              s->coded_format = AV_PIX_FMT_NONE;
>              got_buffer = 1;
> +            ff_thread_finish_setup(avctx);
>          }
>          coeff_data =
> s->plane[s->channel_num].subband[s->subband_num_actual];
>
> @@ -835,6 +899,8 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                         &coeff_data[(highpass_height - 1) *
> highpass_stride],
>                         highpass_stride * sizeof(*coeff_data));
>              }
> +            if (s->transform_type == 2 && s->subband_num_actual == 10)
> +                ff_thread_report_progress(s->picture, progress1 += 1, 0);
>          }
>      }
>      //disabled to run mountain sample file
> @@ -975,7 +1041,6 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>              ret = AVERROR(EINVAL);
>              goto end;
>          }
> -
>          av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane,
> lowpass_height, lowpass_width, highpass_stride);
>          if (s->progressive) {
>              low    = s->plane[plane].subband[0];
> @@ -998,18 +1063,18 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                  output++;
>              }
>
> -            dst = (int16_t *)pic->data[act_plane];
> +            dst = (int16_t *)s->picture->f->data[act_plane];
>              low  = s->plane[plane].l_h[6];
>              high = s->plane[plane].l_h[7];
>              for (i = 0; i < lowpass_height * 2; i++) {
>                  horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
>                  low  += lowpass_width;
>                  high += lowpass_width;
> -                dst  += pic->linesize[act_plane] / 2;
> +                dst  += s->picture->f->linesize[act_plane] / 2;
>              }
>          } else {
> -            av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d",
> pic->interlaced_frame);
> -            pic->interlaced_frame = 1;
> +            av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d",
> s->picture->f->interlaced_frame);
> +            s->picture->f->interlaced_frame = 1;
>              low    = s->plane[plane].subband[0];
>              high   = s->plane[plane].subband[7];
>              output = s->plane[plane].l_h[6];
> @@ -1030,23 +1095,23 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                  output += lowpass_width * 2;
>              }
>
> -            dst  = (int16_t *)pic->data[act_plane];
> +            dst  = (int16_t *)s->picture->f->data[act_plane];
>              low  = s->plane[plane].l_h[6];
>              high = s->plane[plane].l_h[7];
>              for (i = 0; i < lowpass_height; i++) {
> -                inverse_temporal_filter(dst, low, high, lowpass_width *
> 2,  pic->linesize[act_plane]/2, 0);
> +                inverse_temporal_filter(dst, low, high, lowpass_width *
> 2,  s->picture->f->linesize[act_plane]/2, 0);
>                  low  += lowpass_width * 2;
>                  high += lowpass_width * 2;
> -                dst  += pic->linesize[act_plane];
> +                dst  += s->picture->f->linesize[act_plane];
>              }
>          }
>      }
> -    //this is the serial version of IP sample decoding, so the buffers allocated by alloc_buffers() are not freed;
> -    //the stored decoded coefficient data is used to generate the second frame once an empty packet with sample_type == 1 is passed
> +    av_frame_ref(frame.f, s->picture->f);
> +    ff_thread_report_progress(s->picture, INT_MAX, 0);
>      } else if (s->transform_type == 2 && s->sample_type != 1) {
>          for (plane = 0; plane < planes && !ret; plane++) {
> -            int lowpass_height  = s->plane[plane].band[0][0].height;
> -            int lowpass_width   = s->plane[plane].band[0][0].width;
> +            int lowpass_height  = s->plane[plane].band[0][1].height;
> +            int lowpass_width   = s->plane[plane].band[0][1].width;
>              int highpass_stride = s->plane[plane].band[0][1].stride;
>              int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
>              int16_t *low, *high, *output, *dst;
> @@ -1058,8 +1123,6 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                  goto end;
>              }
>
> -            av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i
> %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
> -
>              low    = s->plane[plane].subband[0];
>              high   = s->plane[plane].subband[2];
>              output = s->plane[plane].l_h[0];
> @@ -1110,8 +1173,6 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                  goto end;
>              }
>
> -            av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i
> %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
> -
>              low    = s->plane[plane].subband[0];
>              high   = s->plane[plane].subband[5];
>              output = s->plane[plane].l_h[3];
> @@ -1149,40 +1210,9 @@ static int cfhd_decode(AVCodecContext *avctx, void
> *data, int *got_frame,
>                  output += lowpass_width * 2;
>              }
>
> -            low    = s->plane[plane].subband[7];
> -            high   = s->plane[plane].subband[9];
> -            output = s->plane[plane].l_h[3];
> -            for (i = 0; i < lowpass_width; i++) {
> -                vert_filter(output, lowpass_width, low, lowpass_width,
> high, highpass_stride, lowpass_height);
> -                low++;
> -                high++;
> -                output++;
> -            }
> -
> -            low    = s->plane[plane].subband[8];
> -            high   = s->plane[plane].subband[10];
> -            output = s->plane[plane].l_h[4];
> -            for (i = 0; i < lowpass_width; i++) {
> -                vert_filter(output, lowpass_width, low, highpass_stride,
> high, highpass_stride, lowpass_height);
> -                low++;
> -                high++;
> -                output++;
> -            }
> -
> -            low    = s->plane[plane].l_h[3];
> -            high   = s->plane[plane].l_h[4];
> -            output = s->plane[plane].subband[7];
> -            for (i = 0; i < lowpass_height * 2; i++) {
> -                horiz_filter(output, low, high, lowpass_width);
> -                low    += lowpass_width;
> -                high   += lowpass_width;
> -                output += lowpass_width * 2;
> -            }
> -

Please discard this thread; really sorry for my mistakes in sending the patches.

Gagandeep Singh

Patch

diff --git a/libavcodec/cfhd.c b/libavcodec/cfhd.c
index 76838f0869..3906d7a86d 100644
--- a/libavcodec/cfhd.c
+++ b/libavcodec/cfhd.c
@@ -41,12 +41,15 @@ 
 #define ALPHA_COMPAND_GAIN 9400
 
 enum CFHDParam {
+    TransformType    =  10,
     ChannelCount     =  12,
     SubbandCount     =  14,
+    Pframe           =  19,
     ImageWidth       =  20,
     ImageHeight      =  21,
     LowpassPrecision =  35,
     SubbandNumber    =  48,
+    EncodingMethod   =  52,
     Quantization     =  53,
     ChannelNumber    =  62,
     SampleFlags      =  68,
@@ -64,6 +67,7 @@  static av_cold int cfhd_init(AVCodecContext *avctx)
 
     avctx->bits_per_raw_sample = 10;
     s->avctx                   = avctx;
+    s->progressive             = 0;
 
     return ff_cfhd_init_vlcs(s);
 }
@@ -84,6 +88,10 @@  static void init_peak_table_defaults(CFHDContext *s)
 
 static void init_frame_defaults(CFHDContext *s)
 {
+    s->sample_type       = 0;
+    s->transform_type    = 0;
+    s->pframe            = 0;
+    s->first_wavelet     = 0;
     s->coded_width       = 0;
     s->coded_height      = 0;
     s->cropped_height    = 0;
@@ -97,14 +105,15 @@  static void init_frame_defaults(CFHDContext *s)
     s->pshift            = 1;
     s->codebook          = 0;
     s->difference_coding = 0;
-    s->progressive       = 0;
     init_plane_defaults(s);
     init_peak_table_defaults(s);
 }
 
 /* TODO: merge with VLC tables or use LUT */
-static inline int dequant_and_decompand(int level, int quantisation, int codebook)
+static inline int dequant_and_decompand(int level, int quantisation, int codebook, int lossless)
 {
+    if (lossless)
+        return level;
     if (codebook == 0 || codebook == 1) {
         int64_t abslevel = abs(level);
         if (level < 264)
@@ -193,16 +202,21 @@  static inline void filter(int16_t *output, ptrdiff_t out_stride,
     }
 }
 
-static inline void interlaced_vertical_filter(int16_t *output, int16_t *low, int16_t *high,
-                         int width, int linesize, int plane)
+static inline void inverse_temporal_filter(int16_t *output, int16_t *low, int16_t *high,
+                         int width, int linesize, int temporal_for_highpass)
 {
     int i;
     int16_t even, odd;
     for (i = 0; i < width; i++) {
         even = (low[i] - high[i])/2;
         odd  = (low[i] + high[i])/2;
-        output[i]            = av_clip_uintp2(even, 10);
-        output[i + linesize] = av_clip_uintp2(odd, 10);
+        if (!temporal_for_highpass) {
+            output[i]            = av_clip_uintp2(even, 10);
+            output[i + linesize] = av_clip_uintp2(odd, 10);
+        } else {
+            low[i]  = even;
+            high[i] = odd;
+        }
     }
 }
 static void horiz_filter(int16_t *output, int16_t *low, int16_t *high,
@@ -231,9 +245,12 @@  static void free_buffers(CFHDContext *s)
     for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
         av_freep(&s->plane[i].idwt_buf);
         av_freep(&s->plane[i].idwt_tmp);
-
-        for (j = 0; j < 9; j++)
-            s->plane[i].subband[j] = NULL;
+        if (s->transform_type == 0)
+            for (j = 0; j < 9; j++)
+                s->plane[i].subband[j] = NULL;
+        else
+            for (j = 0; j < 17; j++)
+                s->plane[i].subband[j] = NULL;
 
         for (j = 0; j < 8; j++)
             s->plane[i].l_h[j] = NULL;
@@ -247,7 +264,7 @@  static int alloc_buffers(AVCodecContext *avctx)
     CFHDContext *s = avctx->priv_data;
     int i, j, ret, planes;
     int chroma_x_shift, chroma_y_shift;
-    unsigned k;
+    unsigned k, t;
 
     if ((ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height)) < 0)
         return ret;
@@ -261,6 +278,7 @@  static int alloc_buffers(AVCodecContext *avctx)
 
     for (i = 0; i < planes; i++) {
         int w8, h8, w4, h4, w2, h2;
+        int16_t *frame2;
         int width  = i ? avctx->width  >> chroma_x_shift : avctx->width;
         int height = i ? avctx->height >> chroma_y_shift : avctx->height;
         ptrdiff_t stride = FFALIGN(width  / 8, 8) * 8;
@@ -277,28 +295,68 @@  static int alloc_buffers(AVCodecContext *avctx)
         w2 = w4 * 2;
         h2 = h4 * 2;
 
-        s->plane[i].idwt_buf =
-            av_mallocz_array(height * stride, sizeof(*s->plane[i].idwt_buf));
-        s->plane[i].idwt_tmp =
-            av_malloc_array(height * stride, sizeof(*s->plane[i].idwt_tmp));
-        if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
-            return AVERROR(ENOMEM);
-
-        s->plane[i].subband[0] = s->plane[i].idwt_buf;
-        s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
-        s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
-        s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
-        s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
-        s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
-        s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
-        s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
-        s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
-        s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
-
-        for (j = 0; j < DWT_LEVELS; j++) {
-            for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
-                s->plane[i].band[j][k].a_width  = w8 << j;
-                s->plane[i].band[j][k].a_height = h8 << j;
+        if (s->transform_type == 0) {
+            s->plane[i].idwt_buf =
+                av_mallocz_array(height * stride, sizeof(*s->plane[i].idwt_buf));
+            s->plane[i].idwt_tmp =
+                av_malloc_array(height * stride, sizeof(*s->plane[i].idwt_tmp));
+            if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
+                return AVERROR(ENOMEM);
+        } else if (s->transform_type == 2) {
+            s->plane[i].idwt_buf =
+                av_mallocz_array(2 * height * stride, sizeof(*s->plane[i].idwt_buf));
+            s->plane[i].idwt_tmp =
+                av_malloc_array(2 * height * stride, sizeof(*s->plane[i].idwt_tmp));
+            if (!s->plane[i].idwt_buf || !s->plane[i].idwt_tmp)
+                return AVERROR(ENOMEM);
+        }
+
+        if (s->transform_type == 0) {
+            s->plane[i].subband[0] = s->plane[i].idwt_buf;
+            s->plane[i].subband[1] = s->plane[i].idwt_buf + 2 * w8 * h8;
+            s->plane[i].subband[2] = s->plane[i].idwt_buf + 1 * w8 * h8;
+            s->plane[i].subband[3] = s->plane[i].idwt_buf + 3 * w8 * h8;
+            s->plane[i].subband[4] = s->plane[i].idwt_buf + 2 * w4 * h4;
+            s->plane[i].subband[5] = s->plane[i].idwt_buf + 1 * w4 * h4;
+            s->plane[i].subband[6] = s->plane[i].idwt_buf + 3 * w4 * h4;
+            s->plane[i].subband[7] = s->plane[i].idwt_buf + 2 * w2 * h2;
+            s->plane[i].subband[8] = s->plane[i].idwt_buf + 1 * w2 * h2;
+            s->plane[i].subband[9] = s->plane[i].idwt_buf + 3 * w2 * h2;
+        } else if (s->transform_type == 2) {
+            s->plane[i].subband[0]  = s->plane[i].idwt_buf;
+            s->plane[i].subband[1]  = s->plane[i].idwt_buf + 2 * w8 * h8;
+            s->plane[i].subband[2]  = s->plane[i].idwt_buf + 1 * w8 * h8;
+            s->plane[i].subband[3]  = s->plane[i].idwt_buf + 3 * w8 * h8;
+            s->plane[i].subband[4]  = s->plane[i].idwt_buf + 2 * w4 * h4;
+            s->plane[i].subband[5]  = s->plane[i].idwt_buf + 1 * w4 * h4;
+            s->plane[i].subband[6]  = s->plane[i].idwt_buf + 3 * w4 * h4;
+            frame2 =
+            s->plane[i].subband[7]  = s->plane[i].idwt_buf + 4 * w2 * h2;
+            s->plane[i].subband[8]  = frame2 + 2 * w4 * h4;
+            s->plane[i].subband[9]  = frame2 + 1 * w4 * h4;
+            s->plane[i].subband[10] = frame2 + 3 * w4 * h4;
+            s->plane[i].subband[11] = frame2 + 2 * w2 * h2;
+            s->plane[i].subband[12] = frame2 + 1 * w2 * h2;
+            s->plane[i].subband[13] = frame2 + 3 * w2 * h2;
+            s->plane[i].subband[14] = s->plane[i].idwt_buf + 2 * w2 * h2;
+            s->plane[i].subband[15] = s->plane[i].idwt_buf + 1 * w2 * h2;
+            s->plane[i].subband[16] = s->plane[i].idwt_buf + 3 * w2 * h2;
+        }
+
+        if (s->transform_type == 0) {
+            for (j = 0; j < DWT_LEVELS - 3; j++) {
+                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[j]); k++) {
+                    s->plane[i].band[j][k].a_width  = w8 << j;
+                    s->plane[i].band[j][k].a_height = h8 << j;
+                }
+            }
+        } else if (s->transform_type == 2) {
+            for (j = 0; j < DWT_LEVELS; j++) {
+                t = j < 1 ? 0 : (j < 3 ? 1 : 2);
+                for (k = 0; k < FF_ARRAY_ELEMS(s->plane[i].band[0]); k++) {
+                    s->plane[i].band[j][k].a_width  = w8 << t;
+                    s->plane[i].band[j][k].a_height = h8 << t;
+                }
             }
         }
 
@@ -311,6 +369,11 @@  static int alloc_buffers(AVCodecContext *avctx)
         // s->plane[i].l_h[5] = ll1;
         s->plane[i].l_h[6] = s->plane[i].idwt_tmp;
         s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
+        if (s->transform_type == 2) {
+            frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;
+            s->plane[i].l_h[8] = frame2;
+            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
+        }
     }
 
     s->a_height = s->coded_height;
@@ -349,6 +412,9 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
         } else if (tag == SampleFlags) {
             av_log(avctx, AV_LOG_DEBUG, "Progressive?%"PRIu16"\n", data);
             s->progressive = data & 0x0001;
+        } else if (tag == Pframe) {
+            s->pframe = 1;
+            av_log(avctx, AV_LOG_DEBUG, "Frame type %"PRIu16"\n", data);
         } else if (tag == ImageWidth) {
             av_log(avctx, AV_LOG_DEBUG, "Width %"PRIu16"\n", data);
             s->coded_width = data;
@@ -373,7 +439,7 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             }
         } else if (tag == SubbandCount) {
             av_log(avctx, AV_LOG_DEBUG, "Subband Count: %"PRIu16"\n", data);
-            if (data != SUBBAND_COUNT) {
+            if (data != 10 && data != 17) {
                 av_log(avctx, AV_LOG_ERROR, "Subband Count of %"PRIu16" is unsupported\n", data);
                 ret = AVERROR_PATCHWELCOME;
                 break;
@@ -405,7 +471,7 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
         } else if (tag == 51) {
             av_log(avctx, AV_LOG_DEBUG, "Subband number actual %"PRIu16"\n", data);
             s->subband_num_actual = data;
-            if (s->subband_num_actual >= 10) {
+            if (s->subband_num_actual >= 17 && s->subband_num_actual != 255) {
                 av_log(avctx, AV_LOG_ERROR, "Invalid subband number actual\n");
                 ret = AVERROR(EINVAL);
                 break;
@@ -420,9 +486,15 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             s->prescale_shift[1] = (data >> 3) & 0x7;
             s->prescale_shift[2] = (data >> 6) & 0x7;
             av_log(avctx, AV_LOG_DEBUG, "Prescale shift (VC-5): %x\n", data);
+        } else if (tag == EncodingMethod) {
+            s->encode_method = data;
+            av_log(avctx, AV_LOG_DEBUG, "Encode Method for Subband %d: %x\n", s->subband_num_actual, data);
         } else if (tag == 27) {
             av_log(avctx, AV_LOG_DEBUG, "Lowpass width %"PRIu16"\n", data);
-            if (data < 3 || data > s->plane[s->channel_num].band[0][0].a_width) {
+            if (s->coded_width == 0) {
+                s->coded_width = data << 3;
+            }
+            if (data < 3) {
                 av_log(avctx, AV_LOG_ERROR, "Invalid lowpass width\n");
                 ret = AVERROR(EINVAL);
                 break;
@@ -431,20 +503,21 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             s->plane[s->channel_num].band[0][0].stride = data;
         } else if (tag == 28) {
             av_log(avctx, AV_LOG_DEBUG, "Lowpass height %"PRIu16"\n", data);
-            if (data < 3 || data > s->plane[s->channel_num].band[0][0].a_height) {
+            if (s->coded_height == 0)
+                s->coded_height = data << 3;
+            if (data < 3) {
                 av_log(avctx, AV_LOG_ERROR, "Invalid lowpass height\n");
                 ret = AVERROR(EINVAL);
                 break;
             }
             s->plane[s->channel_num].band[0][0].height = data;
-        } else if (tag == 1)
+        } else if (tag == 1) {
+            s->sample_type = data;
+            if (data == 2)
+                s->pframe = 1;
             av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
-        else if (tag == 10) {
-            if (data != 0) {
-                avpriv_report_missing_feature(avctx, "Transform type of %"PRIu16, data);
-                ret = AVERROR_PATCHWELCOME;
-                break;
-            }
+        } else if (tag == 10) {
+            s->transform_type = data;
             av_log(avctx, AV_LOG_DEBUG, "Transform-type? %"PRIu16"\n", data);
         } else if (abstag >= 0x4000 && abstag <= 0x40ff) {
             if (abstag == 0x4001)
@@ -551,8 +624,8 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             av_log(avctx, AV_LOG_DEBUG,  "Unknown tag %i data %x\n", tag, data);
 
         /* Some kind of end of header tag */
-        if (tag == 4 && data == 0x1a4a && s->coded_width && s->coded_height &&
-            s->coded_format != AV_PIX_FMT_NONE) {
+        if (((tag == 4 && (data == 0x1a4a || data == 0xf0f)) || s->sample_type == 1) && s->coded_width && s->coded_height &&
+            s->coded_format != AV_PIX_FMT_NONE && s->sample_type != 3 && s->sample_type != 6) {
             if (s->a_width != s->coded_width || s->a_height != s->coded_height ||
                 s->a_format != s->coded_format) {
                 free_buffers(s);
@@ -619,8 +692,7 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
 
             av_log(avctx, AV_LOG_DEBUG, "Lowpass coefficients %d\n", lowpass_width * lowpass_height);
         }
-
-        if (tag == 55 && s->subband_num_actual != 255 && s->a_width && s->a_height) {
+        if ((tag == 55 || tag == 82) && s->a_width && s->a_height) {
             int highpass_height = s->plane[s->channel_num].band[s->level][s->subband_num].height;
             int highpass_width  = s->plane[s->channel_num].band[s->level][s->subband_num].width;
             int highpass_a_width = s->plane[s->channel_num].band[s->level][s->subband_num].a_width;
@@ -638,18 +710,24 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             }
 
             if (highpass_height > highpass_a_height || highpass_width > highpass_a_width || a_expected < highpass_height * (uint64_t)highpass_stride) {
-                av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
-                ret = AVERROR(EINVAL);
-                goto end;
+                if (s->subband_num_actual != 255) {
+                    av_log(avctx, AV_LOG_ERROR, "Too many highpass coefficients\n");
+                    ret = AVERROR(EINVAL);
+                    goto end;
+                }
             }
             expected = highpass_height * highpass_stride;
 
             av_log(avctx, AV_LOG_DEBUG, "Start subband coeffs plane %i level %i codebook %i expected %i\n", s->channel_num, s->level, s->codebook, expected);
 
             init_get_bits(&s->gb, gb.buffer, bytestream2_get_bytes_left(&gb) * 8);
+            if (s->subband_num_actual == 255) {
+                expected = 0;
+                goto finish;
+            }
             {
                 OPEN_READER(re, &s->gb);
-                if (!s->codebook) {
+                if (!s->codebook && !(s->transform_type == 2 && s->subband_num_actual == 7)) {
                     while (1) {
                         UPDATE_CACHE(re, &s->gb);
                         GET_RL_VLC(level, run, re, &s->gb, s->table_9_rl_vlc,
@@ -664,9 +742,14 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                         if (count > expected)
                             break;
 
-                        coeff = dequant_and_decompand(level, s->quantisation, 0);
+                        coeff = dequant_and_decompand(level, s->quantisation, 0, (s->sample_type == 2 || s->sample_type == 3) && s->pframe && s->subband_num_actual == 7 && s->encode_method == 5);
                         for (i = 0; i < run; i++)
-                            *coeff_data++ = coeff;
+                            if (tag != 82)
+                                *coeff_data++  = coeff;
+                            else {
+                                *coeff_data   |= coeff << 8;
+                                *coeff_data++ *= s->quantisation;
+                            }
                     }
                     if (s->peak.level)
                         peak_table(coeff_data - expected, &s->peak, expected);
@@ -688,9 +771,14 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                         if (count > expected)
                             break;
 
-                        coeff = dequant_and_decompand(level, s->quantisation, s->codebook);
+                        coeff = dequant_and_decompand(level, s->quantisation, s->codebook, (s->sample_type == 2 || s->sample_type == 3) && s->pframe && s->subband_num_actual == 7 && s->encode_method == 5);
                         for (i = 0; i < run; i++)
-                            *coeff_data++ = coeff;
+                            if (tag != 82)
+                                *coeff_data++  = coeff;
+                            else {
+                                *coeff_data   |= coeff << 8;
+                                *coeff_data++ *= s->quantisation;
+                            }
                     }
                     if (s->peak.level)
                         peak_table(coeff_data - expected, &s->peak, expected);
@@ -701,12 +789,12 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 CLOSE_READER(re, &s->gb);
             }
 
-            if (count > expected) {
+            if (count > expected && s->subband_num_actual != 255) {
                 av_log(avctx, AV_LOG_ERROR, "Escape codeword not found, probably corrupt data\n");
                 ret = AVERROR(EINVAL);
                 goto end;
             }
-
+            finish:
             bytes = FFALIGN(AV_CEIL_RSHIFT(get_bits_count(&s->gb), 3), 4);
             if (bytes > bytestream2_get_bytes_left(&gb)) {
                 av_log(avctx, AV_LOG_ERROR, "Bitstream overread error\n");
@@ -726,14 +814,15 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             }
         }
     }
-
-    if (!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
-        s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) {
+    // dimension check disabled so the mountain sample file can be decoded
+#if 0
+    if ((!s->a_width || !s->a_height || s->a_format == AV_PIX_FMT_NONE ||
+        s->coded_width || s->coded_height || s->coded_format != AV_PIX_FMT_NONE) && s->sample_type != 1) {
         av_log(avctx, AV_LOG_ERROR, "Invalid dimensions\n");
         ret = AVERROR(EINVAL);
         goto end;
     }
-
+#endif
     if (!got_buffer) {
         av_log(avctx, AV_LOG_ERROR, "No end of header tag found\n");
         ret = AVERROR(EINVAL);
@@ -741,8 +830,9 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
     }
 
     planes = av_pix_fmt_count_planes(avctx->pix_fmt);
+    if (s->transform_type == 0 && s->sample_type != 1) {
     for (plane = 0; plane < planes && !ret; plane++) {
-        /* level 1 */
+            /* level 1 */
         int lowpass_height  = s->plane[plane].band[0][0].height;
         int lowpass_width   = s->plane[plane].band[0][0].width;
         int highpass_stride = s->plane[plane].band[0][1].stride;
@@ -921,14 +1011,305 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             low  = s->plane[plane].l_h[6];
             high = s->plane[plane].l_h[7];
             for (i = 0; i < lowpass_height; i++) {
-                interlaced_vertical_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, act_plane);
+                inverse_temporal_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, 0);
                 low  += lowpass_width * 2;
                 high += lowpass_width * 2;
                 dst  += pic->linesize[act_plane];
             }
         }
     }
+    // This is the serial path for I/P sample decoding, so the buffers allocated by alloc_buffers() are not freed;
+    // the stored decoded coefficients are reused to generate the second frame once an empty packet arrives with sample_type == 1.
+    } else if (s->transform_type == 2 && s->sample_type != 1) {
+        for (plane = 0; plane < planes && !ret; plane++) {
+            int lowpass_height  = s->plane[plane].band[0][0].height;
+            int lowpass_width   = s->plane[plane].band[0][0].width;
+            int highpass_stride = s->plane[plane].band[0][1].stride;
+            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
+            int16_t *low, *high, *output, *dst;
+
+            if (lowpass_height > s->plane[plane].band[0][0].a_height || lowpass_width > s->plane[plane].band[0][0].a_width ||
+                !highpass_stride || s->plane[plane].band[0][1].width > s->plane[plane].band[0][1].a_width) {
+                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
+                ret = AVERROR(EINVAL);
+                goto end;
+            }
+
+            av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
+
+            low    = s->plane[plane].subband[0];
+            high   = s->plane[plane].subband[2];
+            output = s->plane[plane].l_h[0];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->plane[plane].subband[1];
+            high   = s->plane[plane].subband[3];
+            output = s->plane[plane].l_h[1];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->plane[plane].l_h[0];
+            high   = s->plane[plane].l_h[1];
+            output = s->plane[plane].subband[0];
+            for (i = 0; i < lowpass_height * 2; i++) {
+                horiz_filter(output, low, high, lowpass_width);
+                low    += lowpass_width;
+                high   += lowpass_width;
+                output += lowpass_width * 2;
+            }
+            if (s->bpc == 12) {
+                output = s->plane[plane].subband[0];
+                for (i = 0; i < lowpass_height * 2; i++) {
+                    for (j = 0; j < lowpass_width * 2; j++)
+                        output[j] *= 4;
+
+                    output += lowpass_width * 2;
+                }
+            }
+
+            lowpass_height  = s->plane[plane].band[1][1].height;
+            lowpass_width   = s->plane[plane].band[1][1].width;
+            highpass_stride = s->plane[plane].band[1][1].stride;
+
+            if (lowpass_height > s->plane[plane].band[1][1].a_height || lowpass_width > s->plane[plane].band[1][1].a_width ||
+                !highpass_stride || s->plane[plane].band[1][1].width > s->plane[plane].band[1][1].a_width) {
+                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
+                ret = AVERROR(EINVAL);
+                goto end;
+            }
+
+            av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
+
+            low    = s->plane[plane].subband[0];
+            high   = s->plane[plane].subband[5];
+            output = s->plane[plane].l_h[3];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
 
+            low    = s->plane[plane].subband[4];
+            high   = s->plane[plane].subband[6];
+            output = s->plane[plane].l_h[4];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->plane[plane].l_h[3];
+            high   = s->plane[plane].l_h[4];
+            output = s->plane[plane].subband[0];
+            for (i = 0; i < lowpass_height * 2; i++) {
+                horiz_filter(output, low, high, lowpass_width);
+                low    += lowpass_width;
+                high   += lowpass_width;
+                output += lowpass_width * 2;
+            }
+
+            output = s->plane[plane].subband[0];
+            for (i = 0; i < lowpass_height * 2; i++) {
+                for (j = 0; j < lowpass_width * 2; j++)
+                    output[j] *= 4;
+                output += lowpass_width * 2;
+            }
+
+            low    = s->plane[plane].subband[7];
+            high   = s->plane[plane].subband[9];
+            output = s->plane[plane].l_h[3];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->plane[plane].subband[8];
+            high   = s->plane[plane].subband[10];
+            output = s->plane[plane].l_h[4];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->plane[plane].l_h[3];
+            high   = s->plane[plane].l_h[4];
+            output = s->plane[plane].subband[7];
+            for (i = 0; i < lowpass_height * 2; i++) {
+                horiz_filter(output, low, high, lowpass_width);
+                low    += lowpass_width;
+                high   += lowpass_width;
+                output += lowpass_width * 2;
+            }
+
+            lowpass_height  = s->plane[plane].band[4][1].height;
+            lowpass_width   = s->plane[plane].band[4][1].width;
+            highpass_stride = s->plane[plane].band[4][1].stride;
+            av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
+
+            if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
+                !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width) {
+                av_log(avctx, AV_LOG_ERROR, "Invalid plane dimensions\n");
+                ret = AVERROR(EINVAL);
+                goto end;
+            }
+
+            low    = s->plane[plane].subband[0];
+            high   = s->plane[plane].subband[7];
+            output = s->plane[plane].subband[0];
+            for (i = 0; i < lowpass_height; i++) {
+                inverse_temporal_filter(output, low, high, lowpass_width, 4 * lowpass_width * lowpass_height, 1);
+                low    += lowpass_width;
+                high   += lowpass_width;
+            }
+            if (s->progressive) {
+                low    = s->plane[plane].subband[0];
+                high   = s->plane[plane].subband[15];
+                output = s->plane[plane].l_h[6];
+                for (i = 0; i < lowpass_width; i++) {
+                    vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                    low++;
+                    high++;
+                    output++;
+                }
+
+                low    = s->plane[plane].subband[14];
+                high   = s->plane[plane].subband[16];
+                output = s->plane[plane].l_h[7];
+                for (i = 0; i < lowpass_width; i++) {
+                    vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                    low++;
+                    high++;
+                    output++;
+                }
+
+                low    = s->plane[plane].subband[7];
+                high   = s->plane[plane].subband[12];
+                output = s->plane[plane].l_h[8];
+                for (i = 0; i < lowpass_width; i++) {
+                    vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                    low++;
+                    high++;
+                    output++;
+                }
+
+                low    = s->plane[plane].subband[11];
+                high   = s->plane[plane].subband[13];
+                output = s->plane[plane].l_h[9];
+                for (i = 0; i < lowpass_width; i++) {
+                    vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                    low++;
+                    high++;
+                    output++;
+                }
+
+                dst = (int16_t *)pic->data[act_plane];
+                low  = s->plane[plane].l_h[6];
+                high = s->plane[plane].l_h[7];
+                for (i = 0; i < lowpass_height * 2; i++) {
+                    horiz_filter(dst, low, high, lowpass_width);
+                    low  += lowpass_width;
+                    high += lowpass_width;
+                    dst  += pic->linesize[act_plane] / 2;
+                }
+            } else {
+                pic->interlaced_frame = 1;
+                low    = s->plane[plane].subband[0];
+                high   = s->plane[plane].subband[14];
+                output = s->plane[plane].l_h[6];
+                for (i = 0; i < lowpass_height; i++) {
+                    horiz_filter(output, low, high, lowpass_width);
+                    low    += lowpass_width;
+                    high   += lowpass_width;
+                    output += lowpass_width * 2;
+                }
+
+                low    = s->plane[plane].subband[15];
+                high   = s->plane[plane].subband[16];
+                output = s->plane[plane].l_h[7];
+                for (i = 0; i < lowpass_height; i++) {
+                    horiz_filter(output, low, high, lowpass_width);
+                    low    += lowpass_width;
+                    high   += lowpass_width;
+                    output += lowpass_width * 2;
+                }
+
+                low    = s->plane[plane].subband[7];
+                high   = s->plane[plane].subband[11];
+                output = s->plane[plane].l_h[8];
+                for (i = 0; i < lowpass_height; i++) {
+                    horiz_filter(output, low, high, lowpass_width);
+                    low    += lowpass_width;
+                    high   += lowpass_width;
+                    output += lowpass_width * 2;
+                }
+
+                low    = s->plane[plane].subband[12];
+                high   = s->plane[plane].subband[13];
+                output = s->plane[plane].l_h[9];
+                for (i = 0; i < lowpass_height; i++) {
+                    horiz_filter(output, low, high, lowpass_width);
+                    low    += lowpass_width;
+                    high   += lowpass_width;
+                    output += lowpass_width * 2;
+                }
+
+
+                dst  = (int16_t *)pic->data[act_plane];
+                low  = s->plane[plane].l_h[6];
+                high = s->plane[plane].l_h[7];
+                for (i = 0; i < lowpass_height; i++) {
+                    inverse_temporal_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, 0);
+                    low  += lowpass_width * 2;
+                    high += lowpass_width * 2;
+                    dst  += pic->linesize[act_plane];
+                }
+            }
+        }
+    } else if (s->sample_type == 1) {
+        int16_t *low, *high, *dst;
+        int lowpass_height, lowpass_width;
+        for (plane = 0; plane < planes && !ret; plane++) {
+            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
+            lowpass_height  = s->plane[plane].band[4][1].height;
+            lowpass_width   = s->plane[plane].band[4][1].width;
+            if (s->progressive) {
+                dst = (int16_t *)pic->data[act_plane];
+                low  = s->plane[plane].l_h[8];
+                high = s->plane[plane].l_h[9];
+                for (i = 0; i < lowpass_height * 2; i++) {
+                    horiz_filter(dst, low, high, lowpass_width);
+                    low  += lowpass_width;
+                    high += lowpass_width;
+                    dst  += pic->linesize[act_plane] / 2;
+                }
+            } else {
+                dst  = (int16_t *)pic->data[act_plane];
+                low  = s->plane[plane].l_h[8];
+                high = s->plane[plane].l_h[9];
+                for (i = 0; i < lowpass_height; i++) {
+                    inverse_temporal_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, 0);
+                    low  += lowpass_width * 2;
+                    high += lowpass_width * 2;
+                    dst  += pic->linesize[act_plane];
+                }
+            }
+        }
+    }
 
 end:
     if (ret < 0)
@@ -961,6 +1342,6 @@  AVCodec ff_cfhd_decoder = {
     .init             = cfhd_init,
     .close            = cfhd_close,
     .decode           = cfhd_decode,
-    .capabilities     = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
-    .caps_internal    = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
+    .capabilities     = AV_CODEC_CAP_DR1,
+    .caps_internal    = FF_CODEC_CAP_INIT_CLEANUP,
 };
diff --git a/libavcodec/cfhd.h b/libavcodec/cfhd.h
index 4f2c82d8bc..047c0f2028 100644
--- a/libavcodec/cfhd.h
+++ b/libavcodec/cfhd.h
@@ -31,15 +31,14 @@ 
 #include "vlc.h"
 
 #define VLC_BITS       9
-#define SUBBAND_COUNT 10
-
+#define SUBBAND_COUNT 17
 typedef struct CFHD_RL_VLC_ELEM {
     int16_t level;
     int8_t len;
     uint16_t run;
 } CFHD_RL_VLC_ELEM;
 
-#define DWT_LEVELS 3
+#define DWT_LEVELS 6
 
 typedef struct SubBand {
     int level;
@@ -64,7 +63,7 @@  typedef struct Plane {
 
     /* TODO: merge this into SubBand structure */
     int16_t *subband[SUBBAND_COUNT];
-    int16_t *l_h[8];
+    int16_t *l_h[10];
 
     SubBand band[DWT_LEVELS][4];
 } Plane;
@@ -83,9 +82,13 @@  typedef struct CFHDContext {
 
     CFHD_RL_VLC_ELEM table_18_rl_vlc[4572];
     VLC vlc_18;
-
     GetBitContext gb;
 
+    int sample_type;
+    int transform_type;
+    int encode_method;
+    int first_wavelet;
+    int pframe;
     int coded_width;
     int coded_height;
     int cropped_height;
-- 
2.17.1
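
A note on the temporal step added above: the new inverse_temporal_filter() is a 2-tap inverse between the temporal lowpass (sum) and highpass (difference) bands, where the even result belongs to the first frame (or field) of the pair and the odd result to the second. A minimal sketch of that arithmetic, with the helper name temporal_inverse_pair and the frame1/frame2 buffers chosen purely for illustration:

    #include <stdint.h>

    /* Illustrative only: reconstruct a pair of frames from the temporal
     * sum (low) and difference (high) bands, as inverse_temporal_filter()
     * does line by line in the decoder. */
    static void temporal_inverse_pair(const int16_t *low, const int16_t *high,
                                      int16_t *frame1, int16_t *frame2, int width)
    {
        for (int i = 0; i < width; i++) {
            frame1[i] = (low[i] - high[i]) / 2; /* even sample -> first frame  */
            frame2[i] = (low[i] + high[i]) / 2; /* odd sample  -> second frame */
        }
    }

In the patch itself the same arithmetic either writes straight into the output picture (with 10-bit clipping) or, when temporal_for_highpass is set, back into the low/high buffers in place.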


From d6ac8a4ee2f08a6d3cab7e7500d740c661062c9f Mon Sep 17 00:00:00 2001
From: Gagandeep Singh <deepgagan231197@gmail.com>
Date: Tue, 14 Aug 2018 00:20:06 +0530
Subject: [GSOC][FFmpeg-devel][PATCH 2/3] lavc/cfhd: corrected decompanding for table 9 used in cfhd

---
 libavcodec/cfhd.c | 25 ++++++++++++++++++++++++-
 1 file changed, 24 insertions(+), 1 deletion(-)

diff --git a/libavcodec/cfhd.c b/libavcodec/cfhd.c
index 3906d7a86d..2c538f0bbd 100644
--- a/libavcodec/cfhd.c
+++ b/libavcodec/cfhd.c
@@ -114,7 +114,30 @@  static inline int dequant_and_decompand(int level, int quantisation, int codeboo
 {
     if (lossless)
         return level;
-    if (codebook == 0 || codebook == 1) {
+    if (codebook == 0) {
+        if (level >= 40 && level < 264) {
+            if (level >= 54) {
+                level  -= 54;
+                level <<= 2;
+                level  += 54;
+            }
+            level  -= 40;
+            level <<= 2;
+            level  += 40;
+        } else if (level <= -40) {
+            level = -level;
+            if (level >= 54) {
+                level  -= 54;
+                level <<= 2;
+                level  += 54;
+            }
+            level  -= 40;
+            level <<= 2;
+            level  += 40;
+            level   = -level;
+        }
+        return level * quantisation;
+    } else if (codebook == 1) {
         int64_t abslevel = abs(level);
         if (level < 264)
             return (abslevel + ((768 * abslevel * abslevel * abslevel) / (255 * 255 * 255))) *
-- 
2.17.1
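
The codebook 0 (table 9) change in the patch above replaces the cubic companding curve with a piecewise-linear expansion: magnitudes below 40 pass through, magnitudes of 40 and above are expanded by a factor of 4 around 40, and magnitudes of 54 and above first get an extra factor-of-4 expansion around 54. A compact restatement of that mapping as a sketch (the helper names expand_mag_cb0 and decompand_cb0 are hypothetical, not part of the patch):

    /* Illustrative restatement of the codebook-0 decompanding above. */
    static int expand_mag_cb0(int mag)
    {
        if (mag >= 54) {
            mag -= 54;
            mag <<= 2;  /* x4 expansion around 54 */
            mag += 54;
        }
        mag -= 40;
        mag <<= 2;      /* x4 expansion around 40 */
        mag += 40;
        return mag;
    }

    static int decompand_cb0(int level, int quantisation)
    {
        if (level >= 40 && level < 264)
            level = expand_mag_cb0(level);
        else if (level <= -40)
            level = -expand_mag_cb0(-level);
        return level * quantisation;
    }

For example, a decoded level of 50 expands to 80 and a level of 60 expands to 192, before the multiplication by the quantisation value.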


From fa23549c61a6d8413cdc79c570376c53795a6ff1 Mon Sep 17 00:00:00 2001
From: Gagandeep Singh <deepgagan231197@gmail.com>
Date: Tue, 14 Aug 2018 12:43:20 +0530
Subject: [GSOC][FFmpeg-devel][PATCH 3/3] lavc/cfhd: frame threading support for 3d transform
 progressive and interlaced samples

---
 libavcodec/cfhd.c | 378 ++++++++++++++++++++++++++++------------------
 libavcodec/cfhd.h |   8 +-
 2 files changed, 242 insertions(+), 144 deletions(-)
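
The hunks below split decoding of a 3D-transform frame pair across frame threads: the thread decoding the first sample publishes per-plane progress on s->picture after each temporal inverse, while the thread producing the second frame (sample_type == 1) waits on s->connection before reading the shared coefficient buffers, and each side finishes with an INT_MAX report once its picture is complete. A rough sketch of that producer/consumer handshake, assuming the in-tree ThreadFrame progress API; the function names and loop bodies here are illustrative only, not code from the patch:

    #include <limits.h>
    #include "cfhd.h"
    #include "thread.h"

    /* Producer side: the thread decoding the first sample of the pair. */
    static void publish_planes(CFHDContext *s, int planes)
    {
        int progress = 1;
        for (int plane = 0; plane < planes; plane++) {
            /* ... temporal inverse for this plane into s->plane[] ... */
            ff_thread_report_progress(s->picture, ++progress, 0);
        }
        ff_thread_report_progress(s->picture, INT_MAX, 0); /* frame fully decoded */
    }

    /* Consumer side: the thread emitting the second frame from the shared buffers. */
    static void consume_planes(CFHDContext *s, int planes)
    {
        int progress = 1;
        for (int plane = 0; plane < planes; plane++) {
            ff_thread_await_progress(s->connection, ++progress, 0);
            /* ... reuse s->buffers[plane] to reconstruct this plane ... */
        }
    }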

diff --git a/libavcodec/cfhd.c b/libavcodec/cfhd.c
index 2c538f0bbd..7c298056ca 100644
--- a/libavcodec/cfhd.c
+++ b/libavcodec/cfhd.c
@@ -63,13 +63,23 @@  enum CFHDParam {
 
 static av_cold int cfhd_init(AVCodecContext *avctx)
 {
+    int ret;
+
     CFHDContext *s = avctx->priv_data;
+    if (!avctx->internal->is_copy) {
+        avctx->internal->allocate_progress = 1;
+        ret = ff_cfhd_init_vlcs(s);
+    } else
+        ret = 0;
 
     avctx->bits_per_raw_sample = 10;
     s->avctx                   = avctx;
     s->progressive             = 0;
+    s->i_frame.f = av_frame_alloc();
+    s->p_frame.f = av_frame_alloc();
 
-    return ff_cfhd_init_vlcs(s);
+
+    return ret;
 }
 
 static void init_plane_defaults(CFHDContext *s)
@@ -268,15 +278,18 @@  static void free_buffers(CFHDContext *s)
     for (i = 0; i < FF_ARRAY_ELEMS(s->plane); i++) {
         av_freep(&s->plane[i].idwt_buf);
         av_freep(&s->plane[i].idwt_tmp);
-        if (s->transform_type == 0)
+        if (s->transform_type == 0) {
             for (j = 0; j < 9; j++)
                 s->plane[i].subband[j] = NULL;
-        else
+            for (j = 0; j < 8; j++)
+                s->plane[i].l_h[j] = NULL;
+        }
+        else {
             for (j = 0; j < 17; j++)
                 s->plane[i].subband[j] = NULL;
-
-        for (j = 0; j < 8; j++)
-            s->plane[i].l_h[j] = NULL;
+            for (j = 0; j < 12; j++)
+                s->plane[i].l_h[j] = NULL;
+        }
     }
     s->a_height = 0;
     s->a_width  = 0;
@@ -394,8 +407,10 @@  static int alloc_buffers(AVCodecContext *avctx)
         s->plane[i].l_h[7] = s->plane[i].idwt_tmp + 2 * w2 * h2;
         if (s->transform_type == 2) {
             frame2 = s->plane[i].idwt_tmp + 4 * w2 * h2;
-            s->plane[i].l_h[8] = frame2;
-            s->plane[i].l_h[9] = frame2 + 2 * w2 * h2;
+            s->plane[i].l_h[8]  = frame2;
+            s->plane[i].l_h[9]  = frame2 + 2 * w4 * h4;
+            s->plane[i].l_h[10] = frame2;
+            s->plane[i].l_h[11] = frame2 + 2 * w2 * h2;
         }
     }
 
@@ -406,14 +421,28 @@  static int alloc_buffers(AVCodecContext *avctx)
     return 0;
 }
 
+static int update_thread_context(AVCodecContext *dst, const AVCodecContext *src)
+{
+    CFHDContext *csrc = src->priv_data;
+    CFHDContext *cdst = dst->priv_data;
+    cdst->transform_type = csrc->transform_type;
+    if (csrc->sample_type != 1 && csrc->transform_type != 0) {
+        cdst->progressive = csrc->progressive;
+        cdst->picture = &csrc->p_frame;
+        cdst->connection = &csrc->i_frame;
+        cdst->buffers = csrc->plane;
+    }
+
+    return 0;
+}
+
 static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                        AVPacket *avpkt)
 {
     CFHDContext *s = avctx->priv_data;
     GetByteContext gb;
     ThreadFrame frame = { .f = data };
-    AVFrame *pic = data;
-    int ret = 0, i, j, planes, plane, got_buffer = 0;
+    int ret = 0, i, j, planes, plane, got_buffer = 0, progress1 = 1, progress2 = 1;
     int16_t *coeff_data;
 
     s->coded_format = AV_PIX_FMT_YUV422P10;
@@ -537,7 +566,9 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
         } else if (tag == 1) {
             s->sample_type = data;
             if (data == 2)
-                s->pframe = 1;
+                s->pframe  = 1;
+            else if (data == 1)
+                s->transform_type = 2;
             av_log(avctx, AV_LOG_DEBUG, "Sample type? %"PRIu16"\n", data);
         } else if (tag == 10) {
             s->transform_type = data;
@@ -657,21 +688,54 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                     return ret;
                 }
             }
-            ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
-            if (ret < 0)
-                return ret;
-            if (s->cropped_height)
-                avctx->height = s->cropped_height;
-            frame.f->width =
-            frame.f->height = 0;
-
-            if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
-                return ret;
-
+            if (s->transform_type == 2) {
+                if (s->sample_type != 1) {
+                    s->picture = &s->i_frame;
+                    s->connection = &s->p_frame;
+                    s->buffers = s->plane;
+                }
+                ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
+                if (ret < 0)
+                    return ret;
+                if (s->sample_type != 1) {
+                    if (s->i_frame.f->data[0])
+                        ff_thread_release_buffer(avctx, &s->i_frame);
+                    if (s->p_frame.f->data[0])
+                        ff_thread_release_buffer(avctx, &s->p_frame);
+                    av_frame_copy_props(s->i_frame.f, frame.f);
+                    av_frame_copy_props(s->p_frame.f, frame.f);
+                    if (s->cropped_height)
+                        avctx->height = s->cropped_height;
+                    s->picture->f->width =
+                    s->picture->f->height = 0;
+                    s->connection->f->width =
+                    s->connection->f->height = 0;
+                    if ((ret = ff_thread_get_buffer(avctx, s->picture, 0)) < 0)
+                        return ret;
+                    if ((ret = ff_thread_get_buffer(avctx, s->connection, 0)) < 0)
+                        return ret;
+                }
+            } else {
+                s->picture = &s->i_frame;
+                s->buffers = s->plane;
+                if (s->picture->f->data[0])
+                    ff_thread_release_buffer(avctx, s->picture);
+                av_frame_copy_props(s->i_frame.f, frame.f);
+                ret = ff_set_dimensions(avctx, s->coded_width, s->coded_height);
+                if (ret < 0)
+                    return ret;
+                if (s->cropped_height)
+                    avctx->height = s->cropped_height;
+                s->picture->f->width =
+                s->picture->f->height = 0;
+                if ((ret = ff_thread_get_buffer(avctx, s->picture, 0)) < 0)
+                    return ret;
+            }
             s->coded_width = 0;
             s->coded_height = 0;
             s->coded_format = AV_PIX_FMT_NONE;
             got_buffer = 1;
+            ff_thread_finish_setup(avctx);
         }
         coeff_data = s->plane[s->channel_num].subband[s->subband_num_actual];
 
@@ -835,6 +899,8 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                        &coeff_data[(highpass_height - 1) * highpass_stride],
                        highpass_stride * sizeof(*coeff_data));
             }
+            if (s->transform_type == 2 && s->subband_num_actual == 10)
+                ff_thread_report_progress(s->picture, progress1 += 1, 0);
         }
     }
     // dimension check disabled so the mountain sample file can be decoded
@@ -975,7 +1041,6 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
             ret = AVERROR(EINVAL);
             goto end;
         }
-
         av_log(avctx, AV_LOG_DEBUG, "Level 3 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
         if (s->progressive) {
             low    = s->plane[plane].subband[0];
@@ -998,18 +1063,18 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 output++;
             }
 
-            dst = (int16_t *)pic->data[act_plane];
+            dst = (int16_t *)s->picture->f->data[act_plane];
             low  = s->plane[plane].l_h[6];
             high = s->plane[plane].l_h[7];
             for (i = 0; i < lowpass_height * 2; i++) {
                 horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                 low  += lowpass_width;
                 high += lowpass_width;
-                dst  += pic->linesize[act_plane] / 2;
+                dst  += s->picture->f->linesize[act_plane] / 2;
             }
         } else {
-            av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", pic->interlaced_frame);
-            pic->interlaced_frame = 1;
+            av_log(avctx, AV_LOG_DEBUG, "interlaced frame ? %d", s->picture->f->interlaced_frame);
+            s->picture->f->interlaced_frame = 1;
             low    = s->plane[plane].subband[0];
             high   = s->plane[plane].subband[7];
             output = s->plane[plane].l_h[6];
@@ -1030,23 +1095,23 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 output += lowpass_width * 2;
             }
 
-            dst  = (int16_t *)pic->data[act_plane];
+            dst  = (int16_t *)s->picture->f->data[act_plane];
             low  = s->plane[plane].l_h[6];
             high = s->plane[plane].l_h[7];
             for (i = 0; i < lowpass_height; i++) {
-                inverse_temporal_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, 0);
+                inverse_temporal_filter(dst, low, high, lowpass_width * 2,  s->picture->f->linesize[act_plane]/2, 0);
                 low  += lowpass_width * 2;
                 high += lowpass_width * 2;
-                dst  += pic->linesize[act_plane];
+                dst  += s->picture->f->linesize[act_plane];
             }
         }
     }
-    // This is the serial path for I/P sample decoding, so the buffers allocated by alloc_buffers() are not freed;
-    // the stored decoded coefficients are reused to generate the second frame once an empty packet arrives with sample_type == 1.
+    av_frame_ref(frame.f, s->picture->f);
+    ff_thread_report_progress(s->picture, INT_MAX, 0);
     } else if (s->transform_type == 2 && s->sample_type != 1) {
         for (plane = 0; plane < planes && !ret; plane++) {
-            int lowpass_height  = s->plane[plane].band[0][0].height;
-            int lowpass_width   = s->plane[plane].band[0][0].width;
+            int lowpass_height  = s->plane[plane].band[0][1].height;
+            int lowpass_width   = s->plane[plane].band[0][1].width;
             int highpass_stride = s->plane[plane].band[0][1].stride;
             int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
             int16_t *low, *high, *output, *dst;
@@ -1058,8 +1123,6 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 goto end;
             }
 
-            av_log(avctx, AV_LOG_DEBUG, "Decoding level 1 plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
-
             low    = s->plane[plane].subband[0];
             high   = s->plane[plane].subband[2];
             output = s->plane[plane].l_h[0];
@@ -1110,8 +1173,6 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 goto end;
             }
 
-            av_log(avctx, AV_LOG_DEBUG, "Level 2 lowpass plane %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
-
             low    = s->plane[plane].subband[0];
             high   = s->plane[plane].subband[5];
             output = s->plane[plane].l_h[3];
@@ -1149,40 +1210,9 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 output += lowpass_width * 2;
             }
 
-            low    = s->plane[plane].subband[7];
-            high   = s->plane[plane].subband[9];
-            output = s->plane[plane].l_h[3];
-            for (i = 0; i < lowpass_width; i++) {
-                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
-                low++;
-                high++;
-                output++;
-            }
-
-            low    = s->plane[plane].subband[8];
-            high   = s->plane[plane].subband[10];
-            output = s->plane[plane].l_h[4];
-            for (i = 0; i < lowpass_width; i++) {
-                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
-                low++;
-                high++;
-                output++;
-            }
-
-            low    = s->plane[plane].l_h[3];
-            high   = s->plane[plane].l_h[4];
-            output = s->plane[plane].subband[7];
-            for (i = 0; i < lowpass_height * 2; i++) {
-                horiz_filter(output, low, high, lowpass_width);
-                low    += lowpass_width;
-                high   += lowpass_width;
-                output += lowpass_width * 2;
-            }
-
             lowpass_height  = s->plane[plane].band[4][1].height;
             lowpass_width   = s->plane[plane].band[4][1].width;
             highpass_stride = s->plane[plane].band[4][1].stride;
-            av_log(avctx, AV_LOG_DEBUG, "temporal level %i %i %i %i\n", plane, lowpass_height, lowpass_width, highpass_stride);
 
             if (lowpass_height > s->plane[plane].band[4][1].a_height || lowpass_width > s->plane[plane].band[4][1].a_width ||
                 !highpass_stride || s->plane[plane].band[4][1].width > s->plane[plane].band[4][1].a_width) {
@@ -1190,7 +1220,7 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 ret = AVERROR(EINVAL);
                 goto end;
             }
-
+            ff_thread_await_progress(s->connection, progress2 += 1, 0);
             low    = s->plane[plane].subband[0];
             high   = s->plane[plane].subband[7];
             output = s->plane[plane].subband[0];
@@ -1199,6 +1229,7 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                 low    += lowpass_width;
                 high   += lowpass_width;
             }
+            ff_thread_report_progress(s->picture, progress1 += 1, 0);
             if (s->progressive) {
                 low    = s->plane[plane].subband[0];
                 high   = s->plane[plane].subband[15];
@@ -1220,37 +1251,17 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                     output++;
                 }
 
-                low    = s->plane[plane].subband[7];
-                high   = s->plane[plane].subband[12];
-                output = s->plane[plane].l_h[8];
-                for (i = 0; i < lowpass_width; i++) {
-                    vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
-                    low++;
-                    high++;
-                    output++;
-                }
-
-                low    = s->plane[plane].subband[11];
-                high   = s->plane[plane].subband[13];
-                output = s->plane[plane].l_h[9];
-                for (i = 0; i < lowpass_width; i++) {
-                    vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
-                    low++;
-                    high++;
-                    output++;
-                }
-
-                dst = (int16_t *)pic->data[act_plane];
+                dst = (int16_t *)s->picture->f->data[act_plane];
                 low  = s->plane[plane].l_h[6];
                 high = s->plane[plane].l_h[7];
                 for (i = 0; i < lowpass_height * 2; i++) {
-                    horiz_filter(dst, low, high, lowpass_width);
+                    horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                     low  += lowpass_width;
                     high += lowpass_width;
-                    dst  += pic->linesize[act_plane] / 2;
+                    dst  += s->picture->f->linesize[act_plane] / 2;
                 }
             } else {
-                pic->interlaced_frame = 1;
+                s->picture->f->interlaced_frame = 1;
                 low    = s->plane[plane].subband[0];
                 high   = s->plane[plane].subband[14];
                 output = s->plane[plane].l_h[6];
@@ -1271,67 +1282,137 @@  static int cfhd_decode(AVCodecContext *avctx, void *data, int *got_frame,
                     output += lowpass_width * 2;
                 }
 
-                low    = s->plane[plane].subband[7];
-                high   = s->plane[plane].subband[11];
-                output = s->plane[plane].l_h[8];
-                for (i = 0; i < lowpass_height; i++) {
-                    horiz_filter(output, low, high, lowpass_width);
-                    low    += lowpass_width;
-                    high   += lowpass_width;
-                    output += lowpass_width * 2;
-                }
-
-                low    = s->plane[plane].subband[12];
-                high   = s->plane[plane].subband[13];
-                output = s->plane[plane].l_h[9];
-                for (i = 0; i < lowpass_height; i++) {
-                    horiz_filter(output, low, high, lowpass_width);
-                    low    += lowpass_width;
-                    high   += lowpass_width;
-                    output += lowpass_width * 2;
-                }
-
-
-                dst  = (int16_t *)pic->data[act_plane];
+                dst  = (int16_t *)s->picture->f->data[act_plane];
                 low  = s->plane[plane].l_h[6];
                 high = s->plane[plane].l_h[7];
                 for (i = 0; i < lowpass_height; i++) {
-                    inverse_temporal_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, 0);
+                    inverse_temporal_filter(dst, low, high, lowpass_width * 2,  s->picture->f->linesize[act_plane]/2, 0);
                     low  += lowpass_width * 2;
                     high += lowpass_width * 2;
-                    dst  += pic->linesize[act_plane];
+                    dst  += s->picture->f->linesize[act_plane];
                 }
             }
         }
+        ff_thread_report_progress(s->picture, INT_MAX, 0);
+        ff_thread_await_progress(s->connection, INT_MAX, 0);
+        av_frame_ref(frame.f, s->picture->f);
     } else if (s->sample_type == 1) {
-        int16_t *low, *high, *dst;
-        int lowpass_height, lowpass_width;
+        int16_t *low, *high, *dst, *output;
+        int lowpass_height, lowpass_width, highpass_stride, act_plane;
+        progress1 = 1;
+        progress2 = 1;
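+        /* per-plane handshake with the connected decoding thread: wait for it
+         * to advance before processing each plane, and report this frame's
+         * progress once the plane has been written back. */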
         for (plane = 0; plane < planes && !ret; plane++) {
-            int act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
-            lowpass_height  = s->plane[plane].band[4][1].height;
-            lowpass_width   = s->plane[plane].band[4][1].width;
+            ff_thread_await_progress(s->connection, progress1 += 1, 0);
+            // temporal highpass: vertical inverse into l_h[8]/l_h[9],
+            // then a horizontal pass back into subband 7
+            lowpass_height  = s->buffers[plane].band[1][1].a_height;
+            lowpass_width   = s->buffers[plane].band[1][1].a_width;
+            highpass_stride = s->buffers[plane].band[1][1].a_width;
+
+            low    = s->buffers[plane].subband[7];
+            high   = s->buffers[plane].subband[9];
+            output = s->buffers[plane].l_h[8];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->buffers[plane].subband[8];
+            high   = s->buffers[plane].subband[10];
+            output = s->buffers[plane].l_h[9];
+            for (i = 0; i < lowpass_width; i++) {
+                vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                low++;
+                high++;
+                output++;
+            }
+
+            low    = s->buffers[plane].l_h[8];
+            high   = s->buffers[plane].l_h[9];
+            output = s->buffers[plane].subband[7];
+            for (i = 0; i < lowpass_height * 2; i++) {
+                horiz_filter(output, low, high, lowpass_width);
+                low    += lowpass_width;
+                high   += lowpass_width;
+                output += lowpass_width * 2;
+            }
+            ff_thread_report_progress(s->picture, progress2 += 1, 0);
+        }
+        for (plane = 0; plane < planes && !ret; plane++) {
+            ff_thread_await_progress(s->connection, progress1 += 1, 0);
+
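+            /* the chroma planes are written swapped: internal plane 1 goes to
+             * output plane 2 and vice versa */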
+            act_plane = plane == 1 ? 2 : plane == 2 ? 1 : plane;
+            lowpass_height  = s->buffers[plane].band[4][1].a_height;
+            lowpass_width   = s->buffers[plane].band[4][1].a_width;
+            highpass_stride = s->buffers[plane].band[4][1].a_width;
+
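+            /* progressive: vertical + horizontal inverse directly into the frame;
+             * interlaced: horizontal inverses, then the temporal filter
+             * interleaves the even/odd output lines. */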
             if (s->progressive) {
-                dst = (int16_t *)pic->data[act_plane];
-                low  = s->plane[plane].l_h[8];
-                high = s->plane[plane].l_h[9];
+                low    = s->buffers[plane].subband[7];
+                high   = s->buffers[plane].subband[12];
+                output = s->buffers[plane].l_h[10];
+                for (i = 0; i < lowpass_width; i++) {
+                    vert_filter(output, lowpass_width, low, lowpass_width, high, highpass_stride, lowpass_height);
+                    low++;
+                    high++;
+                    output++;
+                }
+
+                low    = s->buffers[plane].subband[11];
+                high   = s->buffers[plane].subband[13];
+                output = s->buffers[plane].l_h[11];
+                for (i = 0; i < lowpass_width; i++) {
+                    vert_filter(output, lowpass_width, low, highpass_stride, high, highpass_stride, lowpass_height);
+                    low++;
+                    high++;
+                    output++;
+                }
+
+                dst = (int16_t *)s->picture->f->data[act_plane];
+                low  = s->buffers[plane].l_h[10];
+                high = s->buffers[plane].l_h[11];
                 for (i = 0; i < lowpass_height * 2; i++) {
-                    horiz_filter(dst, low, high, lowpass_width);
+                    horiz_filter_clip(dst, low, high, lowpass_width, s->bpc);
                     low  += lowpass_width;
                     high += lowpass_width;
-                    dst  += pic->linesize[act_plane] / 2;
+                    dst  += s->picture->f->linesize[act_plane] / 2;
                 }
             } else {
-                dst  = (int16_t *)pic->data[act_plane];
-                low  = s->plane[plane].l_h[8];
-                high = s->plane[plane].l_h[9];
+                av_log(avctx, AV_LOG_DEBUG, "interlaced frame? %d\n", s->picture->f->interlaced_frame);
+                s->picture->f->interlaced_frame = 1;
+                low    = s->buffers[plane].subband[7];
+                high   = s->buffers[plane].subband[11];
+                output = s->buffers[plane].l_h[10];
+                for (i = 0; i < lowpass_height; i++) {
+                    horiz_filter(output, low, high, lowpass_width);
+                    low    += lowpass_width;
+                    high   += lowpass_width;
+                    output += lowpass_width * 2;
+                }
+
+                low    = s->buffers[plane].subband[12];
+                high   = s->buffers[plane].subband[13];
+                output = s->buffers[plane].l_h[11];
                 for (i = 0; i < lowpass_height; i++) {
-                    inverse_temporal_filter(dst, low, high, lowpass_width * 2,  pic->linesize[act_plane]/2, 0);
+                    horiz_filter(output, low, high, lowpass_width);
+                    low    += lowpass_width;
+                    high   += lowpass_width;
+                    output += lowpass_width * 2;
+                }
+
+                dst  = (int16_t *)s->picture->f->data[act_plane];
+                low  = s->buffers[plane].l_h[10];
+                high = s->buffers[plane].l_h[11];
+                for (i = 0; i < lowpass_height; i++) {
+                    inverse_temporal_filter(dst, low, high, lowpass_width * 2,  s->picture->f->linesize[act_plane]/2, 0);
                     low  += lowpass_width * 2;
                     high += lowpass_width * 2;
-                    dst  += pic->linesize[act_plane];
+                    dst  += s->picture->f->linesize[act_plane];
                 }
             }
         }
+        ff_thread_report_progress(s->picture, INT_MAX, 0);
+        ff_thread_await_progress(s->connection, INT_MAX, 0);
+        av_frame_ref(frame.f, s->picture->f);
     }
 
 end:
@@ -1352,19 +1433,30 @@  static av_cold int cfhd_close(AVCodecContext *avctx)
         ff_free_vlc(&s->vlc_9);
         ff_free_vlc(&s->vlc_18);
     }
+    if (s->i_frame.f && s->i_frame.f->data[0])
+        ff_thread_release_buffer(avctx, &s->i_frame);
+    if (s->p_frame.f && s->p_frame.f->data[0])
+        ff_thread_release_buffer(avctx, &s->p_frame);
+
+    av_frame_free(&s->i_frame.f);
+    av_frame_free(&s->p_frame.f);
 
     return 0;
 }
 
 AVCodec ff_cfhd_decoder = {
-    .name             = "cfhd",
-    .long_name        = NULL_IF_CONFIG_SMALL("Cineform HD"),
-    .type             = AVMEDIA_TYPE_VIDEO,
-    .id               = AV_CODEC_ID_CFHD,
-    .priv_data_size   = sizeof(CFHDContext),
-    .init             = cfhd_init,
-    .close            = cfhd_close,
-    .decode           = cfhd_decode,
-    .capabilities     = AV_CODEC_CAP_DR1,
-    .caps_internal    = FF_CODEC_CAP_INIT_CLEANUP,
+    .name                  = "cfhd",
+    .long_name             = NULL_IF_CONFIG_SMALL("Cineform HD"),
+    .type                  = AVMEDIA_TYPE_VIDEO,
+    .id                    = AV_CODEC_ID_CFHD,
+    .priv_data_size        = sizeof(CFHDContext),
+    .init                  = cfhd_init,
+    .close                 = cfhd_close,
+    .decode                = cfhd_decode,
+    .init_thread_copy      = ONLY_IF_THREADS_ENABLED(cfhd_init),
+    .update_thread_context = ONLY_IF_THREADS_ENABLED(update_thread_context),
+    .capabilities          = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
+    .caps_internal         = FF_CODEC_CAP_INIT_THREADSAFE | FF_CODEC_CAP_INIT_CLEANUP,
 };
diff --git a/libavcodec/cfhd.h b/libavcodec/cfhd.h
index 047c0f2028..d7a2ffe0a7 100644
--- a/libavcodec/cfhd.h
+++ b/libavcodec/cfhd.h
@@ -29,6 +29,7 @@ 
 #include "bytestream.h"
 #include "get_bits.h"
 #include "vlc.h"
+#include "thread.h"
 
 #define VLC_BITS       9
 #define SUBBAND_COUNT 17
@@ -63,7 +64,7 @@  typedef struct Plane {
 
     /* TODO: merge this into SubBand structure */
     int16_t *subband[SUBBAND_COUNT];
-    int16_t *l_h[10];
+    int16_t *l_h[12];
 
     SubBand band[DWT_LEVELS][4];
 } Plane;
@@ -76,6 +77,10 @@  typedef struct Peak {
 
 typedef struct CFHDContext {
     AVCodecContext *avctx;
+    ThreadFrame i_frame;
+    ThreadFrame p_frame;
+    ThreadFrame *connection;
+    ThreadFrame *picture;
 
     CFHD_RL_VLC_ELEM table_9_rl_vlc[2088];
     VLC vlc_9;
@@ -116,6 +121,7 @@  typedef struct CFHDContext {
 
     uint8_t prescale_shift[3];
     Plane plane[4];
+    Plane *buffers;
     Peak peak;
 } CFHDContext;