So far, we have an audio delay filter (adelay), but no video delay.
This patch adds one.

(NB the difference from tpad: tpad does not alter the pts of the
passed-through frames, which causes A/V sync problems with large delays,
at least when running live. delay uses a circular buffer of frames and
emits each delayed frame with the pts of the frame that replaced it in
the buffer.)
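
For example, a hypothetical invocation (the file names here are just
placeholders) that delays video by 50 frames, i.e. two seconds at 25 fps,
filling the start with black:

  ffmpeg -i input.mkv -vf delay=delay=50:color=black -c:a copy output.mkv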

Meredydd
From 2e6af94eec0b3b8dc61b939ab35ed27130be6e15 Mon Sep 17 00:00:00 2001
From: Meredydd Luff <mered...@senatehouse.org>
Date: Tue, 15 Jan 2019 00:16:13 +0000
Subject: [PATCH] avfilter: Add delay filter

The 'delay' filter delays video by the specified number of frames,
inserting solid-color frames at the start. Unlike tpad, delay adjusts
the timestamps (pts) of the delayed video frames.

Signed-off-by: Meredydd Luff <mered...@senatehouse.org>
---
 doc/filters.texi         |  33 +++++++
 libavfilter/Makefile     |   1 +
 libavfilter/allfilters.c |   1 +
 libavfilter/vf_delay.c   | 237 +++++++++++++++++++++++++++++++++++++++++++++++
 4 files changed, 272 insertions(+)
 create mode 100644 libavfilter/vf_delay.c

diff --git a/doc/filters.texi b/doc/filters.texi
index fc98323..65eac3d 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -8002,6 +8002,39 @@ delogo=x=0:y=0:w=100:h=77:band=10
 
 @end itemize
 
+@section delay
+
+Delay video frames.
+
+It accepts the following parameters:
+@table @option
+
+@item delay
+Specify the number of frames by which the video will be delayed. The minimum value is 1.
+
+@item color
+Specify the color of the frames inserted at the start. The default value is "black".
+
+@end table
+
+@subsection Examples
+
+@itemize
+@item
+Delay video by 25 frames.
+@example
+delay=25
+@end example
+
+@item
+Delay video by 50 frames. The first 50 frames will be solid red.
+@example
+delay=delay=50:color=red
+@end example
+
+@end itemize
+
 @section deshake
 
 Attempt to fix small changes in horizontal and/or vertical shift. This
diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index bc642ac..ff469a6 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -202,6 +202,7 @@ OBJS-$(CONFIG_DEFLICKER_FILTER)              += vf_deflicker.o
 OBJS-$(CONFIG_DEINTERLACE_QSV_FILTER)        += vf_deinterlace_qsv.o
 OBJS-$(CONFIG_DEINTERLACE_VAAPI_FILTER)      += vf_deinterlace_vaapi.o vaapi_vpp.o
 OBJS-$(CONFIG_DEJUDDER_FILTER)               += vf_dejudder.o
+OBJS-$(CONFIG_DELAY_FILTER)                  += vf_delay.o
 OBJS-$(CONFIG_DELOGO_FILTER)                 += vf_delogo.o
 OBJS-$(CONFIG_DENOISE_VAAPI_FILTER)          += vf_misc_vaapi.o vaapi_vpp.o
 OBJS-$(CONFIG_DESHAKE_FILTER)                += vf_deshake.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index c51ae0f..08248f2 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -190,6 +190,7 @@ extern AVFilter ff_vf_deflicker;
 extern AVFilter ff_vf_deinterlace_qsv;
 extern AVFilter ff_vf_deinterlace_vaapi;
 extern AVFilter ff_vf_dejudder;
+extern AVFilter ff_vf_delay;
 extern AVFilter ff_vf_delogo;
 extern AVFilter ff_vf_denoise_vaapi;
 extern AVFilter ff_vf_deshake;
diff --git a/libavfilter/vf_delay.c b/libavfilter/vf_delay.c
new file mode 100644
index 0000000..0c5e267
--- /dev/null
+++ b/libavfilter/vf_delay.c
@@ -0,0 +1,237 @@
+/*
+ * Copyright (c) 2019 Meredydd Luff
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavutil/avassert.h"
+#include "libavutil/channel_layout.h"
+#include "libavutil/opt.h"
+#include "libavutil/timestamp.h"
+#include "avfilter.h"
+#include "audio.h"
+#include "filters.h"
+#include "internal.h"
+#include "formats.h"
+#include "drawutils.h"
+
+typedef struct DelayContext {
+    const AVClass *class;
+
+    AVFrame **q;            ///< circular buffer of delayed frames
+    unsigned q_next;        ///< index of the next slot to be swapped
+    unsigned q_fill;        ///< number of real frames currently buffered
+
+    int delay;              ///< delay in frames
+    uint8_t rgba_color[4];  ///< color of the frames inserted at the start
+
+    int64_t pts;            ///< pts of the last emitted frame
+    FFDrawContext draw;
+    FFDrawColor color;
+    int eof;                ///< input EOF has been seen
+} DelayContext;
+
+#define OFFSET(x) offsetof(DelayContext, x)
+#define VF AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM
+
+static const AVOption delay_options[] = {
+    { "delay", "set the number of frames to delay input",              OFFSET(delay),      AV_OPT_TYPE_INT,   {.i64=0},        0,   INT_MAX, VF },
+    { "color", "set the color of the added frames",                    OFFSET(rgba_color), AV_OPT_TYPE_COLOR, {.str="black"},  0,         0, VF },
+    { NULL }
+};
+
+AVFILTER_DEFINE_CLASS(delay);
+
+static int query_formats(AVFilterContext *ctx)
+{
+    return ff_set_common_formats(ctx, ff_draw_supported_pixel_formats(0));
+}
+
+static inline void advance_queue(DelayContext *s)
+{
+    s->q_next++;
+    if (s->q_next == s->delay) {
+        s->q_next = 0;
+    }
+}
+
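+/* Swap *f with the oldest frame in the circular buffer: the incoming frame
+ * (or NULL once the input has ended) takes its slot, and the displaced frame
+ * (NULL while the buffer is still filling) is returned to the caller. */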
+static inline void swap_queue_next(DelayContext *s, AVFrame **f)
+{
+    AVFrame *frame_in = *f;
+    *f = s->q[s->q_next];
+    s->q[s->q_next] = frame_in;
+    advance_queue(s);
+    if (s->q_fill != s->delay && frame_in) {
+        s->q_fill++;
+    }
+}
+
+static inline AVFrame * queue_peek(DelayContext *s)
+{
+    return s->q[s->q_next];
+}
+
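+/* Emit one frame: the incoming frame enters the circular buffer and the
+ * frame it displaces is sent downstream. While the buffer is still filling,
+ * a solid-color frame is generated instead. */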
+static int filter_frame(AVFilterContext *ctx, AVFrame *frame)
+{
+    AVFilterLink *outlink = ctx->outputs[0];
+    DelayContext *s = ctx->priv;
+    AVFrame *frame_in = frame;
+    swap_queue_next(s, &frame);
+
+    if (!frame) {
+        frame = ff_get_video_buffer(outlink, outlink->w, outlink->h);
+        if (!frame)
+            return AVERROR(ENOMEM);
+        ff_fill_rectangle(&s->draw, &s->color,
+                          frame->data, frame->linesize,
+                          0, 0, frame->width, frame->height);
+    }
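+    /* The outgoing frame takes the pts of the frame that just entered the
+     * buffer; once the input has ended, extrapolate by one frame interval. */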
+    if (frame_in) {
+        frame->pts = frame_in->pts;
+    } else {
+        frame->pts = s->pts + av_rescale_q(1, av_inv_q(outlink->frame_rate),
+                                           outlink->time_base);
+    }
+    s->pts = frame->pts;
+    return ff_filter_frame(outlink, frame);
+}
+
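+/* Consume at most one input frame per activation; after the input reports
+ * EOF, drain the buffered frames one per activation. */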
+static int activate(AVFilterContext *ctx)
+{
+    AVFilterLink *inlink = ctx->inputs[0];
+    AVFilterLink *outlink = ctx->outputs[0];
+    DelayContext *s = ctx->priv;
+    AVFrame *frame = NULL;
+    int ret, status;
+    int64_t pts;
+
+    FF_FILTER_FORWARD_STATUS_BACK(outlink, inlink);
+
+    if (s->eof) {
+        if (queue_peek(s)) {
+            return filter_frame(ctx, NULL);
+        } else {
+            ff_outlink_set_status(outlink, AVERROR_EOF, s->pts);
+            return 0;
+        }
+    } else {
+        ret = ff_inlink_consume_frame(inlink, &frame);
+        if (ret < 0) {
+            return ret;
+        }
+        if (ret > 0) {
+            return filter_frame(ctx, frame);
+        }
+    }
+
+    if (!s->eof && ff_inlink_acknowledge_status(inlink, &status, &pts)) {
+        if (status == AVERROR_EOF) {
+            s->eof = 1;
+            s->pts = pts;
+            if (s->q_fill == 0) {
+                ff_outlink_set_status(outlink, status, pts);
+                return 0;
+            }
+        }
+    }
+
+    if (s->eof && ff_outlink_frame_wanted(outlink)) {
+        if (s->q_fill != 0) {
+            return filter_frame(ctx, NULL);
+        } else {
+            ff_outlink_set_status(outlink, AVERROR_EOF, s->pts);
+            return 0;
+        }
+    }
+
+    FF_FILTER_FORWARD_WANTED(outlink, inlink);
+
+    return FFERROR_NOT_READY;
+}
+
+static int config_input(AVFilterLink *inlink)
+{
+    AVFilterContext *ctx = inlink->dst;
+    DelayContext *s = ctx->priv;
+
+    ff_draw_init(&s->draw, inlink->format, 0);
+    ff_draw_color(&s->draw, &s->color, s->rgba_color);
+
+    return 0;
+}
+
+static int init(AVFilterContext *ctx)
+{
+    DelayContext *s = ctx->priv;
+    if (s->delay < 1) {
+        av_log(ctx, AV_LOG_ERROR, "The minimum video delay is 1 frame.\n");
+        return AVERROR(EINVAL);
+    }
+    s->q = av_mallocz(s->delay * sizeof(AVFrame *));
+    if (!s->q) {
+        return AVERROR(ENOMEM);
+    }
+    return 0;
+}
+
+static void uninit(AVFilterContext *ctx)
+{
+    DelayContext *s = ctx->priv;
+    int i;
+
+    if (s->q) {
+        for (i = 0; i < s->delay; i++)
+            av_frame_free(&s->q[i]);
+    }
+    av_freep(&s->q);
+}
+
+static const AVFilterPad delay_inputs[] = {
+    {
+        .name         = "default",
+        .type         = AVMEDIA_TYPE_VIDEO,
+        .config_props = config_input,
+    },
+    { NULL }
+};
+
+static const AVFilterPad delay_outputs[] = {
+    {
+        .name = "default",
+        .type = AVMEDIA_TYPE_VIDEO,
+    },
+    { NULL }
+};
+
+AVFilter ff_vf_delay = {
+    .name          = "delay",
+    .description   = NULL_IF_CONFIG_SMALL("Delay video frames."),
+    .priv_size     = sizeof(DelayContext),
+    .priv_class    = &delay_class,
+    .query_formats = query_formats,
+    .activate      = activate,
+    .init          = init,
+    .uninit        = uninit,
+    .inputs        = delay_inputs,
+    .outputs       = delay_outputs,
+};
-- 
2.7.4
