Example command line to visualize the block decomposition:
./ffmpeg -export_side_data +venc_params -i input.webm -vf codecview=bs=true output.webm
---
 doc/filters.texi           |  3 +++
 libavfilter/vf_codecview.c | 41 ++++++++++++++++++++++++++++++++++++++
 2 files changed, 44 insertions(+)
diff --git a/doc/filters.texi b/doc/filters.texi
index 84567dec16..db2c80b1e9 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -7285,6 +7285,9 @@ backward predicted MVs of B-frames
 @item qp
 Display quantization parameters using the chroma planes.
 
+@item bs
+Display block structure using the luma plane.
+
 @item mv_type, mvt
 Set motion vectors type to visualize. Includes MVs from all frames unless specified
 by @var{frame_type} option.
diff --git a/libavfilter/vf_codecview.c b/libavfilter/vf_codecview.c
index 331bfba777..db06625d70 100644
--- a/libavfilter/vf_codecview.c
+++ b/libavfilter/vf_codecview.c
@@ -34,6 +34,7 @@
 #include "libavutil/opt.h"
 #include "avfilter.h"
 #include "internal.h"
+#include "libavutil/video_enc_params.h"
 
 #define MV_P_FOR  (1<<0)
 #define MV_B_FOR  (1<<1)
@@ -51,6 +52,7 @@ typedef struct CodecViewContext {
     unsigned mv_type;
     int hsub, vsub;
     int qp;
+    int bs;
 } CodecViewContext;
 
 #define OFFSET(x) offsetof(CodecViewContext, x)
@@ -63,6 +65,7 @@ static const AVOption codecview_options[] = {
     CONST("bf", "forward predicted MVs of B-frames",  MV_B_FOR,  "mv"),
     CONST("bb", "backward predicted MVs of B-frames", MV_B_BACK, "mv"),
     { "qp", NULL, OFFSET(qp), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS },
+    { "bs", "set block structure to visualize", OFFSET(bs), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS },
     { "mv_type", "set motion vectors type", OFFSET(mv_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv_type" },
     { "mvt",     "set motion vectors type", OFFSET(mv_type), AV_OPT_TYPE_FLAGS, {.i64=0}, 0, INT_MAX, FLAGS, "mv_type" },
     CONST("fp", "forward predicted MVs",  MV_TYPE_FOR,  "mv_type"),
@@ -212,6 +215,30 @@ static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
     draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
 }
 
+static void draw_block_border(AVFrame *frame, AVVideoBlockParams *b)
+{
+    const int lzy = frame->linesize[0];
+    uint8_t *py = frame->data[0] + b->src_y * lzy;
+
+    for (int x = b->src_x; x < b->src_x + b->w; x++) {
+        if (x >= frame->width)
+            break;
+        py[x] = py[x] * 3 / 4;
+    }
+    for (int y = b->src_y; y < b->src_y + b->h; y++) {
+        if (y >= frame->height)
+            break;
+        py[b->src_x] = py[b->src_x] * 3 / 4;
+        py[b->src_x + b->w - 1] = py[b->src_x + b->w - 1] * 3 / 4;
+        py += lzy;
+    }
+    for (int x = b->src_x; x < b->src_x + b->w; x++) {
+        if (x >= frame->width)
+            break;
+        py[x] = py[x] * 3 / 4;
+    }
+}
+
 static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
 {
     AVFilterContext *ctx = inlink->dst;
@@ -242,6 +269,20 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *frame)
         }
     }
 
+    if (s->bs) {
+        AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_VIDEO_ENC_PARAMS);
+        if (sd) {
+            AVVideoEncParams *par = (AVVideoEncParams*)sd->data;
+
+            if (par->nb_blocks) {
+                for (int i = 0; i < par->nb_blocks; i++) {
+                    AVVideoBlockParams *b = av_video_enc_params_block(par, i);
+                    draw_block_border(frame, b);
+                }
+            }
+        }
+    }
+
     if (s->mv || s->mv_type) {
         AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_MOTION_VECTORS);
         if (sd) {
-- 
2.27.0.383.g050319c2ae-goog
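For reference, below is a minimal sketch (not part of the patch) of how the same AVVideoEncParams side data the new bs option reads can be inspected from application code. It assumes the frame was decoded with venc_params export enabled (as with -export_side_data +venc_params in the command line above); the helper name dump_block_structure and the printed format are purely illustrative.

#include <stdio.h>

#include <libavutil/frame.h>
#include <libavutil/video_enc_params.h>

/* Illustrative helper: print the per-block geometry that codecview's
 * draw_block_border() outlines on the luma plane. */
static void dump_block_structure(const AVFrame *frame)
{
    AVFrameSideData *sd = av_frame_get_side_data(frame, AV_FRAME_DATA_VIDEO_ENC_PARAMS);
    AVVideoEncParams *par;

    if (!sd)
        return; /* decoder did not export venc_params for this frame */

    par = (AVVideoEncParams *)sd->data;
    printf("base qp %d, %u blocks\n", par->qp, par->nb_blocks);

    for (unsigned i = 0; i < par->nb_blocks; i++) {
        AVVideoBlockParams *b = av_video_enc_params_block(par, i);
        /* src_x/src_y/w/h are the same fields the filter uses when it
         * darkens each block edge to 3/4 of its luma value */
        printf("block %u: %dx%d at (%d,%d), delta_qp %d\n",
               i, b->w, b->h, b->src_x, b->src_y, b->delta_qp);
    }
}

The filter does the equivalent walk in draw_block_border(), so with bs=true each block shows up as a darkened outline directly on the luma plane.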