From a4b2a4fecbb147b285cf8609d9c0144081e3c40a Mon Sep 17 00:00:00 2001
From: Arwa Arif <arwaarif1...@gmail.com>
Date: Fri, 24 Oct 2014 16:49:40 +0530
Subject: [PATCH] lavfi: add xBR filter
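
A quick way to try the filter (file names below are only placeholders; the
filter takes no options in this version and always scales by 2):

    ffmpeg -i input.png -vf xbr output.png
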
Makefile allfilters.c
---
 libavfilter/Makefile     |   1 +
 libavfilter/allfilters.c |   1 +
 libavfilter/vf_xbr.c     | 359 ++++++++++++++++++++++++++++++++++++++++++++++
 3 files changed, 361 insertions(+)
 create mode 100644 libavfilter/vf_xbr.c

diff --git a/libavfilter/Makefile b/libavfilter/Makefile
index 6d868e7..2c56e38 100644
--- a/libavfilter/Makefile
+++ b/libavfilter/Makefile
@@ -198,6 +198,7 @@ OBJS-$(CONFIG_VIDSTABDETECT_FILTER)          += vidstabutils.o vf_vidstabdetect.o
 OBJS-$(CONFIG_VIDSTABTRANSFORM_FILTER)       += vidstabutils.o vf_vidstabtransform.o
 OBJS-$(CONFIG_VIGNETTE_FILTER)               += vf_vignette.o
 OBJS-$(CONFIG_W3FDIF_FILTER)                 += vf_w3fdif.o
+OBJS-$(CONFIG_XBR_FILTER)                    += vf_xbr.o
 OBJS-$(CONFIG_YADIF_FILTER)                  += vf_yadif.o
 OBJS-$(CONFIG_ZMQ_FILTER)                    += f_zmq.o
 OBJS-$(CONFIG_ZOOMPAN_FILTER)                += vf_zoompan.o
diff --git a/libavfilter/allfilters.c b/libavfilter/allfilters.c
index d88a9ad..2352d44 100644
--- a/libavfilter/allfilters.c
+++ b/libavfilter/allfilters.c
@@ -213,6 +213,7 @@ void avfilter_register_all(void)
     REGISTER_FILTER(VIDSTABTRANSFORM, vidstabtransform, vf);
     REGISTER_FILTER(VIGNETTE,       vignette,       vf);
     REGISTER_FILTER(W3FDIF,         w3fdif,         vf);
+    REGISTER_FILTER(XBR,            xbr,            vf);
     REGISTER_FILTER(YADIF,          yadif,          vf);
     REGISTER_FILTER(ZMQ,            zmq,            vf);
     REGISTER_FILTER(ZOOMPAN,        zoompan,        vf);
diff --git a/libavfilter/vf_xbr.c b/libavfilter/vf_xbr.c
new file mode 100644
index 0000000..956a344
--- /dev/null
+++ b/libavfilter/vf_xbr.c
@@ -0,0 +1,359 @@
+/*
+ * This file is part of FFmpeg.
+ * Copyright (C) 2014 Arwa Arif <arwaarif1...@gmail.com>
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+/**
+ * @file
+ * XBR filter, used for depixelizing images.
+ * Based on Hyllian's 2xBR shader.
+ */
+
+#include "libavutil/opt.h"
+#include "libavutil/avassert.h"
+#include "libavutil/pixdesc.h"
+#include "internal.h"
+
+#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
+
+const int THRESHOLD_Y = 48;
+const int THRESHOLD_U = 7;
+const int THRESHOLD_V = 6;
+
+/**
+ * Calculates the weight of the difference between two pixels by converting
+ * them to Y'UV and applying the thresholds used by the HQx filters,
+ * 48*Y + 7*U + 6*V, which gives the smooth-looking edges.
+ */
+static int d(AVFrame *in, int x1, int y1, int x2, int y2)
+{
+    int r1 = *(in->data[0] + y1 * in->linesize[0] + x1*3);
+    int g1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + 1);
+    int b1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + 2);
+
+    int r2 = *(in->data[0] + y2 * in->linesize[0] + x2*3);
+    int g2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + 1);
+    int b2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + 2);
+
+    int r = abs(r1 - r2);
+    int g = abs(g1 - g2);
+    int b = abs(b1 - b2);
+
+    /* Convert RGB to Y'UV */
+    int y = r *  .299000 + g *  .587000 + b *  .114000;
+    int u = r * -.168736 + g * -.331264 + b *  .500000;
+    int v = r *  .500000 + g * -.418688 + b * -.081312;
+
+    /* Apply the HQx filter thresholds and return */
+    return (y * THRESHOLD_Y) + (u * THRESHOLD_U) + (v * THRESHOLD_V);
+}
+
+/**
+ * Mixes pixel A with pixel B, with B's transparency set to 'a'.
+ * In other words, A is a solid color (bottom) and B is a transparent color (top).
+ */
+static int mix(AVFrame *in, int x1, int y1, int x2, int y2, float a, int mode)
+{
+    int col1, col2;
+
+    if (mode == 0) {
+        /* red channel */
+        col1 = *(in->data[0] + y1 * in->linesize[0] + x1*3);
+        col2 = *(in->data[0] + y2 * in->linesize[0] + x2*3);
+    } else if (mode == 1) {
+        /* green channel */
+        col1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + 1);
+        col2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + 1);
+    } else {
+        /* blue channel */
+        col1 = *(in->data[0] + y1 * in->linesize[0] + x1*3 + 2);
+        col2 = *(in->data[0] + y2 * in->linesize[0] + x2*3 + 2);
+    }
+
+    return (int)(a*col2 + (1-a)*col1);
+}
+
+/**
+ * Applies the xBR filter rules.
+ */
+static void apply_edge_detection_rules(AVFrame *in, AVFrame *out, int x, int y)
+{
+    /* Matrix: (10 is 0,0, i.e. the current pixel)
+
+            -2 | -1|  0| +1| +2   (x)
+         ______________________________
+      -2 |         [ 0][ 1][ 2]
+      -1 |     [ 3][ 4][ 5][ 6][ 7]
+       0 |     [ 8][ 9][10][11][12]
+      +1 |     [13][14][15][16][17]
+      +2 |         [18][19][20]
+         |
+      (y)|
+    */
+
+    /* Cached pixel weight differences */
+    int d_10_9  = d(in, x, y, x-1, y);
+    int d_10_5  = d(in, x, y, x, y-1);
+    int d_10_11 = d(in, x, y, x+1, y);
+    int d_10_15 = d(in, x, y, x, y+1);
+    int d_10_14 = d(in, x, y, x-1, y+1);
+    int d_10_6  = d(in, x, y, x+1, y-1);
+    int d_4_8   = d(in, x-1, y-1, x-2, y);
+    int d_4_1   = d(in, x-1, y-1, x, y-2);
+    int d_9_5   = d(in, x-1, y, x, y-1);
+    int d_9_15  = d(in, x-1, y, x, y+1);
+    int d_9_3   = d(in, x-1, y, x-2, y-1);
+    int d_5_11  = d(in, x, y-1, x+1, y);
+    int d_5_0   = d(in, x, y-1, x-1, y-2);
+    int d_10_4  = d(in, x, y, x-1, y-1);
+    int d_10_16 = d(in, x, y, x+1, y+1);
+    int d_6_12  = d(in, x+1, y-1, x+2, y);
+    int d_6_1   = d(in, x+1, y-1, x, y-2);
+    int d_11_15 = d(in, x+1, y, x, y+1);
+    int d_11_7  = d(in, x+1, y, x+2, y-1);
+    int d_5_2   = d(in, x, y-1, x+1, y-2);
+    int d_14_8  = d(in, x-1, y+1, x-2, y);
+    int d_14_19 = d(in, x-1, y+1, x, y+2);
+    int d_15_18 = d(in, x, y+1, x-1, y+2);
+    int d_9_13  = d(in, x-1, y, x-2, y+1);
+    int d_16_12 = d(in, x+1, y+1, x+2, y);
+    int d_16_19 = d(in, x+1, y+1, x, y+2);
+    int d_15_20 = d(in, x, y+1, x+1, y+2);
+    int d_15_17 = d(in, x, y+1, x+2, y+1);
+
+    /**
+     * Note on reading the edge detection rules:
+     *
+     * Each edge rule is an if..else statement; every time the else branch is
+     * taken, the current pixel color pointed to by matrix[10] is used to
+     * color its edge.
+     *
+     * Each if statement checks whether the sum of weight differences on the
+     * left is less than the sum of weight differences on the right.
+     */
+
+    /**
+     * Top Left Edge Detection Rule
+     */
+    if ((d_10_14 + d_10_6 + d_4_8 + d_4_1 + (4 * d_9_5)) < (d_9_15 + d_9_3 + d_5_11 + d_5_0 + (4 * d_10_4))) {
+        int u, v, r, g, b;
+
+        /* Figure out what color to blend with the current pixel --> 10 */
+        if (d_10_9 <= d_10_5) {
+            u = x-1;
+            v = y;
+        } else {
+            u = x;
+            v = y-1;
+        }
+
+        /* Mix colors */
+        r = mix(in, u, v, x, y, .5, 0);
+        g = mix(in, u, v, x, y, .5, 1);
+        b = mix(in, u, v, x, y, .5, 2);
+
+        /* Insert blended color into scaledImageData */
+        *(out->data[0] + (y*2)*out->linesize[0] + (x*2)*3)     = r;
+        *(out->data[0] + (y*2)*out->linesize[0] + (x*2)*3 + 1) = g;
+        *(out->data[0] + (y*2)*out->linesize[0] + (x*2)*3 + 2) = b;
+    } else {
+        /* Insert current pixel color into scaledImageData */
+        *(out->data[0] + (y*2)*out->linesize[0] + (x*2)*3)     = *(in->data[0] + y*in->linesize[0] + x*3);
+        *(out->data[0] + (y*2)*out->linesize[0] + (x*2)*3 + 1) = *(in->data[0] + y*in->linesize[0] + x*3 + 1);
+        *(out->data[0] + (y*2)*out->linesize[0] + (x*2)*3 + 2) = *(in->data[0] + y*in->linesize[0] + x*3 + 2);
+    }
+
+    /**
+     * Top Right Edge Detection Rule
+     */
+    if ((d_10_16 + d_10_4 + d_6_12 + d_6_1 + (4 * d_5_11)) < (d_11_15 + d_11_7 + d_9_5 + d_5_2 + (4 * d_10_6))) {
+        int u, v, r, g, b;
+
+        /* Figure out what color to blend with the current pixel --> 10 */
+        if (d_10_5 <= d_10_11) {
+            u = x;
+            v = y-1;
+        } else {
+            u = x+1;
+            v = y;
+        }
+
+        /* Mix colors */
+        r = mix(in, u, v, x, y, .5, 0);
+        g = mix(in, u, v, x, y, .5, 1);
+        b = mix(in, u, v, x, y, .5, 2);
+
+        /* Insert blended color into scaledImageData */
+        *(out->data[0] + (y*2)*out->linesize[0] + ((x*2)+1)*3)     = r;
+        *(out->data[0] + (y*2)*out->linesize[0] + ((x*2)+1)*3 + 1) = g;
+        *(out->data[0] + (y*2)*out->linesize[0] + ((x*2)+1)*3 + 2) = b;
+    } else {
+        /* Insert current pixel color into scaledImageData */
+        *(out->data[0] + (y*2)*out->linesize[0] + ((x*2)+1)*3)     = *(in->data[0] + y*in->linesize[0] + x*3);
+        *(out->data[0] + (y*2)*out->linesize[0] + ((x*2)+1)*3 + 1) = *(in->data[0] + y*in->linesize[0] + x*3 + 1);
+        *(out->data[0] + (y*2)*out->linesize[0] + ((x*2)+1)*3 + 2) = *(in->data[0] + y*in->linesize[0] + x*3 + 2);
+    }
+
+    /**
+     * Bottom Left Edge Detection Rule
+     */
+    if ((d_10_4 + d_10_16 + d_14_8 + d_14_19 + (4 * d_9_15)) < (d_9_5 + d_9_13 + d_11_15 + d_15_18 + (4 * d_10_14))) {
+        int u, v, r, g, b;
+
+        /* Figure out what color to blend with the current pixel --> 10 */
+        if (d_10_9 <= d_10_15) {
+            u = x-1;
+            v = y;
+        } else {
+            u = x;
+            v = y+1;
+        }
+
+        /* Mix colors */
+        r = mix(in, u, v, x, y, .5, 0);
+        g = mix(in, u, v, x, y, .5, 1);
+        b = mix(in, u, v, x, y, .5, 2);
+
+        /* Insert blended color into scaledImageData */
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + (x*2)*3)     = r;
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + (x*2)*3 + 1) = g;
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + (x*2)*3 + 2) = b;
+    } else {
+        /* Insert current pixel color into scaledImageData */
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + (x*2)*3)     = *(in->data[0] + y*in->linesize[0] + x*3);
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + (x*2)*3 + 1) = *(in->data[0] + y*in->linesize[0] + x*3 + 1);
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + (x*2)*3 + 2) = *(in->data[0] + y*in->linesize[0] + x*3 + 2);
+    }
+
+    /**
+     * Bottom Right Edge Detection Rule
+     */
+    if ((d_10_6 + d_10_14 + d_16_12 + d_16_19 + (4 * d_11_15)) < (d_9_15 + d_15_20 + d_15_17 + d_5_11 + (4 * d_10_16))) {
+        int u, v, r, g, b;
+
+        /* Figure out what color to blend with the current pixel --> 10 */
+        if (d_10_11 <= d_10_15) {
+            u = x+1;
+            v = y;
+        } else {
+            u = x;
+            v = y+1;
+        }
+
+        /* Mix colors */
+        r = mix(in, u, v, x, y, .5, 0);
+        g = mix(in, u, v, x, y, .5, 1);
+        b = mix(in, u, v, x, y, .5, 2);
+
+        /* Insert blended color into scaledImageData */
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + ((x*2)+1)*3)     = r;
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + ((x*2)+1)*3 + 1) = g;
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + ((x*2)+1)*3 + 2) = b;
+    } else {
+        /* Insert current pixel color into scaledImageData */
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + ((x*2)+1)*3)     = *(in->data[0] + y*in->linesize[0] + x*3);
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + ((x*2)+1)*3 + 1) = *(in->data[0] + y*in->linesize[0] + x*3 + 1);
+        *(out->data[0] + ((y*2)+1)*out->linesize[0] + ((x*2)+1)*3 + 2) = *(in->data[0] + y*in->linesize[0] + x*3 + 2);
+    }
+}
+
+static int config_output(AVFilterLink *outlink)
+{
+    AVFilterContext *ctx = outlink->src;
+    AVFilterLink *inlink = ctx->inputs[0];
+
+    outlink->w = inlink->w * 2;
+    outlink->h = inlink->h * 2;
+    return 0;
+}
+
+static int query_formats(AVFilterContext *ctx)
+{
+    static const enum AVPixelFormat pix_fmts[] = {
+        AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24, AV_PIX_FMT_NONE,
+    };
+
+    ff_set_common_formats(ctx, ff_make_format_list(pix_fmts));
+    return 0;
+}
+
+static int filter_frame(AVFilterLink *inlink, AVFrame *in)
+{
+    int i, j;
+    AVFilterContext *ctx = inlink->dst;
+    AVFilterLink *outlink = ctx->outputs[0];
+    AVFrame *out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
+
+    if (!out) {
+        av_frame_free(&in);
+        return AVERROR(ENOMEM);
+    }
+
+    av_frame_copy_props(out, in);
+    for (i = 0; i < inlink->w; i++)
+        for (j = 0; j < inlink->h; j++)
+            apply_edge_detection_rules(in, out, i, j);
+
+    out->width  = outlink->w;
+    out->height = outlink->h;
+
+    av_frame_free(&in);
+    return ff_filter_frame(outlink, out);
+}
+
+static const AVFilterPad xbr_inputs[] = {
+    {
+        .name         = "default",
+        .type         = AVMEDIA_TYPE_VIDEO,
+        .filter_frame = filter_frame,
+    },
+    { NULL }
+};
+
+static const AVFilterPad xbr_outputs[] = {
+    {
+        .name         = "default",
+        .type         = AVMEDIA_TYPE_VIDEO,
+        .config_props = config_output,
+    },
+    { NULL }
+};
+
+AVFilter ff_vf_xbr = {
+    .name          = "xbr",
+    .description   = NULL_IF_CONFIG_SMALL("Scale the input by 2 using the xBR algorithm."),
+    .inputs        = xbr_inputs,
+    .outputs       = xbr_outputs,
+    .query_formats = query_formats,
+};
-- 
1.7.9.5
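
A note for reviewers on the pixel indexing used throughout
apply_edge_detection_rules(): every input pixel (x, y) owns the 2x2 block of
output pixels whose top-left corner is (2*x, 2*y). The helper below is only an
illustrative sketch of that mapping for packed RGB24; the name and the plain
copy are not part of the patch, which blends each of the four corners with the
edge rules instead of copying them:

    #include <stdint.h>

    /* Illustrative only: nearest-neighbour 2x upscale of one RGB24 pixel,
     * writing the same 2x2 output block that the edge rules refine. */
    static void copy_2x2_rgb24(const uint8_t *src, int src_linesize,
                               uint8_t *dst, int dst_linesize,
                               int x, int y)
    {
        const uint8_t *p = src + y * src_linesize + x * 3;
        int dx, dy, c;

        for (dy = 0; dy < 2; dy++)
            for (dx = 0; dx < 2; dx++)
                for (c = 0; c < 3; c++)
                    dst[(2*y + dy) * dst_linesize + (2*x + dx) * 3 + c] = p[c];
    }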