[FFmpeg-devel,1/2] ffmpeg: handle flushing and non-flushing separately in sub2video

Message ID CAEu79SZnpp-nCYndfEA6Sm+1c7nGb0ONw7Phey5xznfMmLQLBQ@mail.gmail.com
State New

Commit Message

Jan Ekström Aug. 31, 2018, 12:14 p.m. UTC
The initial fix for sub2video flushing during filter chain restarts
turned out to add additional buffering into the filter chain, which
worked for files but caused premature failures when handling live
streams.

This fix reverts the timestamp filtering and separates flushing from
normal heartbeat functionality by adding an explicit flush parameter.
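
As a rough illustration of the intended split (not part of the patch: the
function and all names below are invented, and the av_rescale_q()
conversion to the stream time base is omitted), the NULL-AVSubtitle branch
of sub2video_update() after this change can be modelled with the following
self-contained sketch:

    /* Toy model of the NULL-AVSubtitle branch of sub2video_update() after
     * this patch. Not FFmpeg code: names are invented, AV_NOPTS_VALUE is
     * stood in by NOPTS, and the av_rescale_q() conversion to the stream
     * time base is left out. */
    #include <stdint.h>
    #include <stdio.h>
    #include <inttypes.h>

    #define NOPTS INT64_MIN /* stand-in for AV_NOPTS_VALUE */

    static void pick_blank_frame_times(int64_t last_pts, int64_t frame_pts,
                                       int64_t prev_end_pts, int flush,
                                       int64_t *pts, int64_t *end_pts)
    {
        if (flush) {
            /* flush: start at the previous end_pts and mark the frame as
             * lasting "forever" so the filter chain can drain */
            *pts     = prev_end_pts;
            *end_pts = INT64_MAX;
        } else {
            /* heartbeat: prefer the last known sub2video pts, otherwise
             * fall back to the current input timestamp in frame_pts */
            if (last_pts != NOPTS && last_pts < INT64_MAX)
                *pts = last_pts;
            else
                *pts = frame_pts;
            /* short validity window (2*1000 AV_TIME_BASE_Q units in the
             * patch, before rescaling) instead of buffering indefinitely */
            *end_pts = *pts + 2 * 1000;
        }
    }

    int main(void)
    {
        int64_t pts, end_pts;

        pick_blank_frame_times(NOPTS, 90000, 0, /*flush=*/0, &pts, &end_pts);
        printf("heartbeat: pts=%" PRId64 " end_pts=%" PRId64 "\n", pts, end_pts);

        pick_blank_frame_times(90000, 0, 92000, /*flush=*/1, &pts, &end_pts);
        printf("flush:     pts=%" PRId64 " end_pts=%" PRId64 "\n", pts, end_pts);
        return 0;
    }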

Patch

From 9ed9d80535a1e6a2a70faef296fd8d3cbfde8e55 Mon Sep 17 00:00:00 2001
From: Jan Ekström <jan.ekstrom@aminocom.com>
Date: Thu, 26 Jul 2018 19:07:38 +0300
Subject: [PATCH 1/2] ffmpeg: handle flushing and non-flushing separately in
 sub2video

This stops accidental flushes from happening in sub2video when the
filter chain gets re-configured/re-initialized. Heartbeats should not
cause a flush and additional buffering in the filter chain.

This properly fixes filter chain flush handling; previously the
filter chain would keep buffering frames.

Additionally, document the various stages of sub2video.
---
 fftools/ffmpeg.c        | 57 +++++++++++++++++++++++++++++++++++------
 fftools/ffmpeg.h        |  2 +-
 fftools/ffmpeg_filter.c |  2 +-
 3 files changed, 51 insertions(+), 10 deletions(-)

diff --git a/fftools/ffmpeg.c b/fftools/ffmpeg.c
index 934dc71a74..758e3839a3 100644
--- a/fftools/ffmpeg.c
+++ b/fftools/ffmpeg.c
@@ -237,7 +237,7 @@  static void sub2video_push_ref(InputStream *ist, int64_t pts)
     }
 }
 
-void sub2video_update(InputStream *ist, AVSubtitle *sub)
+void sub2video_update(InputStream *ist, AVSubtitle *sub, int flush)
 {
     AVFrame *frame = ist->sub2video.frame;
     int8_t *dst;
@@ -254,9 +254,47 @@  void sub2video_update(InputStream *ist, AVSubtitle *sub)
                                  AV_TIME_BASE_Q, ist->st->time_base);
         num_rects = sub->num_rects;
     } else {
-        pts       = ist->sub2video.end_pts;
-        end_pts   = INT64_MAX;
         num_rects = 0;
+
+        if (flush) {
+            /* Flush of the sub2video filter chain was requested.
+               Utilize previous end_pts as the pts, and set end_pts to
+               the filter chain flush value (INT64_MAX). */
+            pts     = ist->sub2video.end_pts;
+            end_pts = INT64_MAX;
+
+            av_log(ist->dec_ctx, AV_LOG_DEBUG,
+                   "nullptr AVSubtitle (flush): utilized values: "
+                   "pts=%"PRId64", end_pts=%"PRId64"\n",
+                   pts, end_pts);
+        } else {
+            /* We do not have an active sub-picture and this was not a flush.
+               We will be sending a blank heartbeat frame to the filter
+               chain. */
+
+            if (ist->sub2video.last_pts != AV_NOPTS_VALUE &&
+                ist->sub2video.last_pts < INT64_MAX) {
+                /* if the last PTS is not invalid (it's not unknown/unset or
+                   the flush value), use the last PTS as the start point
+                   for this filter chain heartbeat frame. */
+                pts = ist->sub2video.last_pts;
+            } else {
+                /* If the last sub2video PTS is invalid, use the provided
+                   sub2video frame PTS (set to the current input
+                   timestamp by sub2video_heartbeat) */
+                pts = frame->pts;
+            }
+
+            /* FIXME: decide if pts+1 is good enough for this, currently set
+               for .2 seconds */
+            end_pts = pts + av_rescale_q(2*1000LL, AV_TIME_BASE_Q,
+                                         ist->st->time_base);
+
+            av_log(ist->dec_ctx, AV_LOG_DEBUG,
+                   "nullptr AVSubtitle (no flush): utilized values: "
+                   "pts=%"PRId64", end_pts=%"PRId64"\n",
+                   pts, end_pts);
+        }
     }
     if (sub2video_get_blank_frame(ist) < 0) {
         av_log(ist->dec_ctx, AV_LOG_ERROR,
@@ -291,9 +329,12 @@  static void sub2video_heartbeat(InputStream *ist, int64_t pts)
         /* do not send the heartbeat frame if the subtitle is already ahead */
         if (pts2 <= ist2->sub2video.last_pts)
             continue;
-        if (pts2 >= ist2->sub2video.end_pts ||
-            (!ist2->sub2video.frame->data[0] && ist2->sub2video.end_pts < INT64_MAX))
-            sub2video_update(ist2, NULL);
+        if (pts2 >= ist2->sub2video.end_pts || !ist2->sub2video.frame->data[0]) {
+            /* if we do not have a subtitle to show for a specific time,
+               add the current stream time as the fall-back PTS */
+            ist2->sub2video.frame->pts = pts2;
+            sub2video_update(ist2, NULL, 0);
+        }
         for (j = 0, nb_reqs = 0; j < ist2->nb_filters; j++)
             nb_reqs += av_buffersrc_get_nb_failed_requests(ist2->filters[j]->filter);
         if (nb_reqs)
@@ -307,7 +348,7 @@  static void sub2video_flush(InputStream *ist)
     int ret;
 
     if (ist->sub2video.end_pts < INT64_MAX)
-        sub2video_update(ist, NULL);
+        sub2video_update(ist, NULL, 1);
     for (i = 0; i < ist->nb_filters; i++) {
         ret = av_buffersrc_add_frame(ist->filters[i]->filter, NULL);
         if (ret != AVERROR_EOF && ret < 0)
@@ -2517,7 +2558,7 @@  static int transcode_subtitles(InputStream *ist, AVPacket *pkt, int *got_output,
         return ret;
 
     if (ist->sub2video.frame) {
-        sub2video_update(ist, &subtitle);
+        sub2video_update(ist, &subtitle, 0);
     } else if (ist->nb_filters) {
         if (!ist->sub2video.sub_queue)
             ist->sub2video.sub_queue = av_fifo_alloc(8 * sizeof(AVSubtitle));
diff --git a/fftools/ffmpeg.h b/fftools/ffmpeg.h
index eb1eaf6363..d2aba21ed1 100644
--- a/fftools/ffmpeg.h
+++ b/fftools/ffmpeg.h
@@ -646,7 +646,7 @@  int filtergraph_is_simple(FilterGraph *fg);
 int init_simple_filtergraph(InputStream *ist, OutputStream *ost);
 int init_complex_filtergraph(FilterGraph *fg);
 
-void sub2video_update(InputStream *ist, AVSubtitle *sub);
+void sub2video_update(InputStream *ist, AVSubtitle *sub, int flush);
 
 int ifilter_parameters_from_frame(InputFilter *ifilter, const AVFrame *frame);
 
diff --git a/fftools/ffmpeg_filter.c b/fftools/ffmpeg_filter.c
index 6518d50870..4cb81ea41f 100644
--- a/fftools/ffmpeg_filter.c
+++ b/fftools/ffmpeg_filter.c
@@ -1161,7 +1161,7 @@  int configure_filtergraph(FilterGraph *fg)
             while (av_fifo_size(ist->sub2video.sub_queue)) {
                 AVSubtitle tmp;
                 av_fifo_generic_read(ist->sub2video.sub_queue, &tmp, sizeof(tmp), NULL);
-                sub2video_update(ist, &tmp);
+                sub2video_update(ist, &tmp, 0);
                 avsubtitle_free(&tmp);
             }
         }
-- 
2.17.1
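
Along the same lines, the heartbeat-side decision from the
sub2video_heartbeat() hunk above can be summarised with a second standalone
sketch (again with invented identifiers; the time-base rescaling and the
av_buffersrc_get_nb_failed_requests() accounting are left out): a blank
frame is only sent when the subtitle stream is not already ahead and either
the current sub-picture has expired or no sub-picture has been rendered
yet, with the current input timestamp used as the fallback pts.

    /* Toy model of the post-patch sub2video_heartbeat() decision for one
     * subtitle stream. Not FFmpeg code: names are invented and the
     * time-base rescaling plus the buffersrc bookkeeping are omitted. */
    #include <stdint.h>
    #include <stdio.h>
    #include <inttypes.h>

    struct sub2video_state {
        int64_t last_pts;    /* pts of the last frame sent downstream */
        int64_t end_pts;     /* expiry time of the current sub-picture */
        int     has_picture; /* non-zero once frame->data[0] is filled */
    };

    /* Returns 1 if a blank heartbeat frame should be sent and stores its
     * pts (the caller would then invoke sub2video_update(ist2, NULL, 0)). */
    static int heartbeat_should_send(const struct sub2video_state *s,
                                     int64_t pts2, int64_t *frame_pts)
    {
        if (pts2 <= s->last_pts)  /* the subtitle is already ahead: skip */
            return 0;
        if (pts2 >= s->end_pts || !s->has_picture) {
            *frame_pts = pts2;    /* fallback pts = current input timestamp */
            return 1;
        }
        return 0;
    }

    int main(void)
    {
        struct sub2video_state s = { .last_pts = 1000, .end_pts = 5000,
                                     .has_picture = 0 };
        int64_t fpts;

        if (heartbeat_should_send(&s, 3000, &fpts))
            printf("send blank heartbeat frame at pts=%" PRId64 "\n", fpts);
        return 0;
    }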