I've added documentation for this extension of the aphasemeter filter. Also, I'm not sure that "phasing" is the right word to describe the detection.
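For reviewers, here is a small standalone sketch (not part of the patch; the function names and sample values are mine) of the per-frame decision the detection makes, assuming fphase is the mean stereo correlation in [-1, 1] that aphasemeter already computes: a frame counts as mono when fphase is within tolerance of +1, and as out of phase when fphase falls below cos(angle).

/* Standalone illustration only -- not part of the patch.
 * Function names and sample values are made up for the example. */
#include <float.h>
#include <math.h>
#include <stdio.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

/* mono: mean phase within 'tolerance' of +1, mirroring
 * (tolerance - fphase) < FLT_EPSILON with tolerance = 1 - option value */
static int mono_measurement(float fphase, float tolerance)
{
    return ((1.0f - tolerance) - fphase) < FLT_EPSILON;
}

/* out of phase: mean phase below cos(angle), mirroring
 * (angle - fphase) > FLT_EPSILON with angle = cosf(option / 180 * M_PI) */
static int out_phase_measurement(float fphase, float angle_deg)
{
    return (cosf(angle_deg / 180.0f * (float)M_PI) - fphase) > FLT_EPSILON;
}

int main(void)
{
    /* e.g. tolerance=0.001 and angle=170 as in the documentation example */
    printf("mono? %d, out of phase? %d\n",
           mono_measurement(0.9995f, 0.001f),
           out_phase_measurement(-0.99f, 170.0f));
    return 0;
}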
From 1e356929e878a2081add102b77a9560647232ef8 Mon Sep 17 00:00:00 2001
From: Romane Lafon <rom...@nomalab.com>
Date: Wed, 3 Jul 2019 15:15:16 +0200
Subject: [PATCH] avfilter/avf_aphasemeter: Add out of phase and mono detection
Signed-off-by: Romane Lafon <rom...@nomalab.com>
---
 doc/filters.texi              |  32 +++++++++++
 libavfilter/avf_aphasemeter.c | 127 ++++++++++++++++++++++++++++++++++++++++--
 2 files changed, 155 insertions(+), 4 deletions(-)

diff --git a/doc/filters.texi b/doc/filters.texi
index 700a76f239..ec8c73d558 100644
--- a/doc/filters.texi
+++ b/doc/filters.texi
@@ -20656,6 +20656,38 @@ Set color which will be used for drawing median phase. If color is
 Enable video output. Default is enabled.
 @end table
 
+@subsection phasing detection
+
+The filter also detects out of phase and mono sequences in stereo streams.
+It logs the sequence start, end and duration when it lasts longer or as long as the minimum set.
+
+The filter accepts the following options for this detection:
+
+@table @option
+@item phasing
+Enable mono and out of phase detection. Default is disabled.
+
+@item tolerance
+Set phase tolerance for mono detection, in amplitude ratio. Default is @code{0}.
+Allowed range is @code{[0, 1]}.
+
+@item angle
+Set angle threshold for out of phase detection, in degree. Default is @code{170}.
+Allowed range is @code{[0, 180]}.
+
+@item duration
+Set mono or out of phase duration until notification, expressed in seconds. Default is @code{2}.
+
+@subsection Examples
+
+@itemize
+@item
+Complete example with @command{ffmpeg} to detect 1 second of mono with 0.001 phase tolerance:
+@example
+ffmpeg -i stereo.wav -af aphasemeter=video=0:phasing=1:duration=1:tolerance=0.001 -f null -
+@end example
+@end itemize
+
 @section avectorscope
 
 Convert input audio to a video output, representing the audio vector
diff --git a/libavfilter/avf_aphasemeter.c b/libavfilter/avf_aphasemeter.c
index f497bc9969..77701e5cde 100644
--- a/libavfilter/avf_aphasemeter.c
+++ b/libavfilter/avf_aphasemeter.c
@@ -28,26 +28,41 @@
 #include "libavutil/intreadwrite.h"
 #include "libavutil/opt.h"
 #include "libavutil/parseutils.h"
+#include "libavutil/timestamp.h"
 #include "avfilter.h"
 #include "formats.h"
 #include "audio.h"
 #include "video.h"
 #include "internal.h"
+#include "stdbool.h"
+#include "float.h"
 
 typedef struct AudioPhaseMeterContext {
     const AVClass *class;
     AVFrame *out;
     int do_video;
+    int do_phasing_detection;
     int w, h;
     AVRational frame_rate;
     int contrast[4];
     uint8_t *mpc_str;
     uint8_t mpc[4];
     int draw_median_phase;
+    int is_mono;
+    int is_out_phase;
+    int start_mono_presence;
+    int start_out_phase_presence;
+    float tolerance;
+    float angle;
+    float phase;
+    float mono_idx[2];
+    float out_phase_idx[2];
+    double duration;
 } AudioPhaseMeterContext;
 
 #define OFFSET(x) offsetof(AudioPhaseMeterContext, x)
 #define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
+#define get_duration(index) (index[1] - index[0])
 
 static const AVOption aphasemeter_options[] = {
     { "rate", "set video rate", OFFSET(frame_rate), AV_OPT_TYPE_VIDEO_RATE, {.str="25"}, 0, INT_MAX, FLAGS },
@@ -59,6 +74,10 @@ static const AVOption aphasemeter_options[] = {
     { "bc", "set blue contrast", OFFSET(contrast[2]), AV_OPT_TYPE_INT, {.i64=1}, 0, 255, FLAGS },
     { "mpc", "set median phase color", OFFSET(mpc_str), AV_OPT_TYPE_STRING, {.str = "none"}, 0, 0, FLAGS },
     { "video", "set video output", OFFSET(do_video), AV_OPT_TYPE_BOOL, {.i64 = 1}, 0, 1, FLAGS },
+    { "phasing", "set mono and out-of-phase detection output", OFFSET(do_phasing_detection), AV_OPT_TYPE_BOOL, {.i64 = 0}, 0, 1, FLAGS },
+    { "tolerance", "set phase tolerance for mono detection", OFFSET(tolerance), AV_OPT_TYPE_FLOAT, {.dbl = 0.}, 0, 1, FLAGS },
+    { "angle", "set angle threshold for out-of-phase detection", OFFSET(angle), AV_OPT_TYPE_FLOAT, {.dbl = 170.}, 90, 180, FLAGS },
+    { "duration", "set minimum mono or out-of-phase duration in seconds", OFFSET(duration), AV_OPT_TYPE_DOUBLE, {.dbl=2.}, 0, 24*60*60, FLAGS },
     { NULL }
 };
 
@@ -140,6 +159,22 @@ static inline int get_x(float phase, int w)
     return (phase + 1.) / 2. * (w - 1);
 }
 
+static inline float get_index(AVFilterLink *inlink, AVFrame *in)
+{
+    char *index_str = av_ts2timestr(in->pts, &inlink->time_base);
+    return atof(index_str);
+}
+
+static inline void add_metadata(AVFrame *insamples, const char *key, float value)
+{
+    char buf[128];
+    char str[128];
+
+    snprintf(str, sizeof(str), "%f", value);
+    snprintf(buf, sizeof(buf), "lavfi.aphasemeter.%s", key);
+    av_dict_set(&insamples->metadata, buf, str, 0);
+}
+
 static int filter_frame(AVFilterLink *inlink, AVFrame *in)
 {
     AVFilterContext *ctx = inlink->dst;
@@ -154,6 +189,10 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
     AVFrame *out;
     uint8_t *dst;
     int i;
+    int mono_measurement;
+    int out_phase_measurement;
+    float tolerance = 1.0f - s->tolerance;
+    float angle = cosf(s->angle/180.0f*M_PI);
 
     if (s->do_video && (!s->out || s->out->width  != outlink->w ||
                                    s->out->height != outlink->h)) {
@@ -193,7 +232,7 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
         fphase += phase;
     }
     fphase /= in->nb_samples;
-
+    s->phase = fphase;
     if (s->do_video) {
         if (s->draw_median_phase) {
             dst = out->data[0] + get_x(fphase, s->w) * 4;
@@ -206,10 +245,64 @@ static int filter_frame(AVFilterLink *inlink, AVFrame *in)
 
     metadata = &in->metadata;
     if (metadata) {
-        uint8_t value[128];
+        add_metadata(in, "phase", fphase);
+    }
 
-        snprintf(value, sizeof(value), "%f", fphase);
-        av_dict_set(metadata, "lavfi.aphasemeter.phase", value, 0);
+    if (s->do_phasing_detection) {
+        mono_measurement = (tolerance - fphase) < FLT_EPSILON;
+        out_phase_measurement = (angle - fphase) > FLT_EPSILON;
+        if (!s->is_mono && mono_measurement) {
+            s->is_mono = 1;
+            s->start_mono_presence = 1;
+            s->mono_idx[0] = get_index(inlink, in);
+        }
+        if (s->is_mono && mono_measurement && s->start_mono_presence) {
+            float mono_duration;
+            s->mono_idx[1] = get_index(inlink, in);
+            mono_duration = get_duration(s->mono_idx);
+            if (mono_duration >= s->duration) {
+                add_metadata(in, "mono_start", s->mono_idx[0]);
+                av_log(s, AV_LOG_INFO, "mono_start: %f\n", s->mono_idx[0]);
+                s->start_mono_presence = 0;
+            }
+        }
+        if (s->is_mono && !mono_measurement) {
+            float mono_duration;
+            s->mono_idx[1] = get_index(inlink, in);
+            mono_duration = get_duration(s->mono_idx);
+            if (mono_duration > s->duration) {
+                add_metadata(in, "mono_end", s->mono_idx[1]);
+                add_metadata(in, "mono_duration", mono_duration);
+                av_log(s, AV_LOG_INFO, "mono_end: %f | mono_duration: %f\n", s->mono_idx[1], mono_duration);
+            }
+            s->is_mono = 0;
+        }
+        if (!s->is_out_phase && out_phase_measurement) {
+            s->out_phase_idx[0] = get_index(inlink, in);
+            s->is_out_phase = 1;
+            s->start_out_phase_presence = 1;
+        }
+        if (s->is_out_phase && out_phase_measurement && s->start_out_phase_presence) {
+            float out_phase_duration;
+            s->out_phase_idx[1] = get_index(inlink, in);
+            out_phase_duration = get_duration(s->out_phase_idx);
+            if (out_phase_duration >= s->duration) {
+                add_metadata(in, "out_phase_start", s->out_phase_idx[0]);
+                av_log(s, AV_LOG_INFO, "out_phase_start: %f\n", s->out_phase_idx[0]);
+                s->start_out_phase_presence = 0;
+            }
+        }
+        if (s->is_out_phase && !out_phase_measurement) {
+            float out_phase_duration;
+            s->out_phase_idx[1] = get_index(inlink, in);
+            out_phase_duration = get_duration(s->out_phase_idx);
+            if (out_phase_duration > s->duration) {
+                add_metadata(in, "out_phase_end", s->out_phase_idx[1]);
+                add_metadata(in, "out_phase_duration", out_phase_duration);
+                av_log(s, AV_LOG_INFO, "out_phase_end: %f | out_phase_duration: %f\n", s->out_phase_idx[1], out_phase_duration);
+            }
+            s->is_out_phase = 0;
+        }
     }
 
     if (s->do_video) {
@@ -224,6 +317,32 @@ static av_cold void uninit(AVFilterContext *ctx)
     AudioPhaseMeterContext *s = ctx->priv;
     int i;
 
+    if (s->do_phasing_detection) {
+        float tolerance = 1.0f - s->tolerance;
+        float angle = cosf(s->angle/180.0f*M_PI);
+        float mono_duration;
+        float out_phase_duration;
+        AVFilterLink *inlink = ctx->inputs[0];
+
+        if (s->is_mono) {
+            char *index_str = av_ts2timestr(inlink->current_pts, &inlink->time_base);
+            s->mono_idx[1] = atof(index_str);
+            mono_duration = get_duration(s->mono_idx);
+            if (mono_duration > s->duration) {
+                av_log(s, AV_LOG_INFO, "mono_end: %f | mono_duration: %f\n", s->mono_idx[1], mono_duration);
+            }
+            s->is_mono = 0;
+        }
+        if (s->is_out_phase) {
+            char *index_str = av_ts2timestr(inlink->current_pts, &inlink->time_base);
+            s->out_phase_idx[1] = atof(index_str);
+            out_phase_duration = get_duration(s->out_phase_idx);
+            if (out_phase_duration > s->duration) {
+                av_log(s, AV_LOG_INFO, "out_phase_end: %f | out_phase_duration: %f\n", s->out_phase_idx[1], out_phase_duration);
+            }
+            s->is_out_phase = 0;
+        }
+    }
     av_frame_free(&s->out);
     for (i = 0; i < ctx->nb_outputs; i++)
         av_freep(&ctx->output_pads[i].name);
-- 
2.11.0