Hi,

attached are patches for HEAD and n3.3 that implement NewTek NDI support. NDI (Network Device Interface) is a standard created by NewTek to make it easy to develop video-related products that share video on a local Ethernet network.

--
Maksym Veremeyenko

From d5df13750d029d5314be24c9b1cb7306947507c1 Mon Sep 17 00:00:00 2001
From: Maksym Veremeyenko <ve...@m1.tv>
Date: Sun, 23 Jul 2017 04:03:31 -0400
Subject: [PATCH] Implement NewTek NDI support

---
 configure                |   9 +
 doc/indevs.texi          |  54 +++++
 doc/outdevs.texi         |  36 ++++
 libavdevice/Makefile     |   2 +
 libavdevice/alldevices.c |   1 +
 libavdevice/ndi_dec.c    | 499 +++++++++++++++++++++++++++++++++++++++++++++++
 libavdevice/ndi_enc.c    | 289 +++++++++++++++++++++++++++
 7 files changed, 890 insertions(+)
 create mode 100644 libavdevice/ndi_dec.c
 create mode 100644 libavdevice/ndi_enc.c

diff --git a/configure b/configure
index 5811ee1..f267b07 100755
--- a/configure
+++ b/configure
@@ -277,6 +277,7 @@ External library support:
   --enable-libzvbi         enable teletext support via libzvbi [no]
   --disable-lzma           disable lzma [autodetect]
   --enable-decklink        enable Blackmagic DeckLink I/O support [no]
+  --enable-ndi             enable NewTek NDI I/O support [no]
   --enable-mediacodec      enable Android MediaCodec support [no]
   --enable-libmysofa       enable libmysofa, needed for sofalizer filter [no]
   --enable-openal          enable OpenAL 1.1 capture support [no]
@@ -1507,6 +1508,7 @@ EXTERNAL_LIBRARY_GPL_LIST="
 
 EXTERNAL_LIBRARY_NONFREE_LIST="
     decklink
+    ndi
     libfdk_aac
     openssl
 "
@@ -3012,6 +3014,10 @@ decklink_indev_deps="decklink threads"
 decklink_indev_extralibs="-lstdc++"
 decklink_outdev_deps="decklink threads"
 decklink_outdev_extralibs="-lstdc++"
+ndi_indev_deps="ndi pthreads"
+ndi_indev_extralibs="-lndi"
+ndi_outdev_deps="ndi pthreads"
+ndi_outdev_extralibs="-lndi"
 dshow_indev_deps="IBaseFilter"
 dshow_indev_extralibs="-lpsapi -lole32 -lstrmiids -luuid -loleaut32 -lshlwapi"
 dv1394_indev_deps="dv1394"
@@ -5564,6 +5570,8 @@ avisynth_demuxer_extralibs='$ldl'
 cuda_extralibs='$ldl'
 decklink_outdev_extralibs="$decklink_outdev_extralibs $ldl"
 decklink_indev_extralibs="$decklink_indev_extralibs $ldl"
+ndi_outdev_extralibs="$ndi_outdev_extralibs $ldl"
+ndi_indev_extralibs="$ndi_indev_extralibs $ldl"
 frei0r_filter_extralibs='$ldl'
 frei0r_src_filter_extralibs='$ldl'
 ladspa_filter_extralibs='$ldl'
@@ -5822,6 +5830,7 @@ enabled coreimage_filter  && { check_header_objcc QuartzCore/CoreImage.h || disa
 enabled coreimagesrc_filter && { check_header_objcc QuartzCore/CoreImage.h || disable coreimagesrc_filter; }
 enabled decklink          && { { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; } &&
                                { check_cpp_condition DeckLinkAPIVersion.h "BLACKMAGIC_DECKLINK_API_VERSION >= 0x0a060100" || die "ERROR: Decklink API version must be >= 10.6.1."; } }
+enabled ndi               && { check_header Processing.NDI.Lib.h || die "ERROR: Processing.NDI.Lib.h header not found"; }
 enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
 enabled gmp               && require gmp gmp.h mpz_export -lgmp
 enabled gnutls            && require_pkg_config gnutls gnutls/gnutls.h gnutls_global_init
diff --git a/doc/indevs.texi b/doc/indevs.texi
index 09e3321..7e9a5f1 100644
--- a/doc/indevs.texi
+++ b/doc/indevs.texi
@@ -327,6 +327,60 @@ ffmpeg -channels 16 -format_code Hi50 -f decklink -i 'UltraStudio Mini Recorder'
 
 @end itemize
 
+@section ndi
+
+The ndi input device provides capture capabilities for NDI (Network
+Device Interface), a standard created by NewTek.
+
+To enable this input device, you need the NDI SDK and you
+need to configure with the appropriate @code{--extra-cflags}
+and @code{--extra-ldflags}.
+
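+For example, assuming the NDI SDK headers and libraries were installed under
+@file{/usr/local/ndi} (adjust the paths to your installation), a configure
+invocation might look like:
+@example
+./configure --enable-nonfree --enable-ndi \
+            --extra-cflags=-I/usr/local/ndi/include \
+            --extra-ldflags=-L/usr/local/ndi/lib
+@end example
+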
+@subsection Options
+
+@table @option
+
+@item find_sources
+If set to @option{true}, print a list of found/available NDI sources and exit.
+Defaults to @option{false}.
+
+@item wait_sources
+Time to wait (in milliseconds) until the number of online sources has changed.
+Defaults to @option{500}.
+
+@item allow_video_fields
+If set to @option{false}, all received video will be progressive.
+Defaults to @option{true}.
+
+@item max_frames_probe
+Maximum number of frames used to probe the input format.
+Defaults to @option{25}.
+
+@item reference_level
+The audio reference level in dB. This specifies how many dB above the
+reference level (+4dBU) is the full range of 16 bit audio.
+Defaults to @option{0}.
+
+@end table
+
+@subsection Examples
+
+@itemize
+
+@item
+List input devices:
+@example
+ffmpeg -f ndi -find_sources 1 -i foo
+@end example
+
+@item
+Restream to NDI:
+@example
+ffmpeg -f ndi -i "DEV-5.INTERNAL.M1STEREO.TV (NDI_SOURCE_NAME_1)" -acodec copy -vcodec wrapped_avframe -f ndi -y NDI_SOURCE_NAME_2
+@end example
+
+@end itemize
+
 @section dshow
 
 Windows DirectShow input device.
diff --git a/doc/outdevs.texi b/doc/outdevs.texi
index df41cc8..7830cc7 100644
--- a/doc/outdevs.texi
+++ b/doc/outdevs.texi
@@ -182,6 +182,42 @@ ffmpeg -i test.avi -f decklink -pix_fmt uyvy422 -s 720x486 -r 24000/1001 'DeckLi
 
 @end itemize
 
+@section ndi
+
+The ndi output device provides playback capabilities over NDI (Network
+Device Interface), a standard created by NewTek.
+
+To enable this output device, you need the NDI SDK and you
+need to configure with the appropriate @code{--extra-cflags}
+and @code{--extra-ldflags}.
+
+NDI is very picky about the formats it supports. The pixel format must be
+uyvy422 and the video codec must be wrapped_avframe; frame rate, field order
+and video size are taken from the stream being written. The audio sample rate
+must be 48 kHz, with 2, 4 or 8 channels of 16-bit PCM.
+
+@subsection Options
+
+@table @option
+
+@item reference_level
+The audio reference level in dB. This specifies how many dB above the
+reference level (+4dBU) is the full range of 16 bit audio.
+Defaults to @option{0}.
+
+@end table
+
+@subsection Examples
+
+@itemize
+
+@item
+Play video clip:
+@example
+ffmpeg -i "udp://@239.1.1.1:10480?fifo_size=1000000&overrun_nonfatal=1" -vf "scale=720:576,fps=fps=25,setdar=dar=16/9,format=pix_fmts=uyvy422" -vcodec wrapped_avframe -f ndi NEW_NDI1
+@end example
+
+@end itemize
+
 @section fbdev
 
 Linux framebuffer output device.
diff --git a/libavdevice/Makefile b/libavdevice/Makefile
index 1d4e9e6..a7e0a18 100644
--- a/libavdevice/Makefile
+++ b/libavdevice/Makefile
@@ -19,6 +19,8 @@ OBJS-$(CONFIG_BKTR_INDEV)                += bktr.o
 OBJS-$(CONFIG_CACA_OUTDEV)               += caca.o
 OBJS-$(CONFIG_DECKLINK_OUTDEV)           += decklink_enc.o decklink_enc_c.o decklink_common.o
 OBJS-$(CONFIG_DECKLINK_INDEV)            += decklink_dec.o decklink_dec_c.o decklink_common.o
+OBJS-$(CONFIG_NDI_OUTDEV)                += ndi_enc.o
+OBJS-$(CONFIG_NDI_INDEV)                 += ndi_dec.o
 OBJS-$(CONFIG_DSHOW_INDEV)               += dshow_crossbar.o dshow.o dshow_enummediatypes.o \
                                             dshow_enumpins.o dshow_filter.o \
                                             dshow_pin.o dshow_common.o
diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c
index a8ed53a..bbea836 100644
--- a/libavdevice/alldevices.c
+++ b/libavdevice/alldevices.c
@@ -46,6 +46,7 @@ static void register_all(void)
     REGISTER_INDEV   (BKTR,             bktr);
     REGISTER_OUTDEV  (CACA,             caca);
     REGISTER_INOUTDEV(DECKLINK,         decklink);
+    REGISTER_INOUTDEV(NDI,              ndi);
     REGISTER_INDEV   (DSHOW,            dshow);
     REGISTER_INDEV   (DV1394,           dv1394);
     REGISTER_INOUTDEV(FBDEV,            fbdev);
diff --git a/libavdevice/ndi_dec.c b/libavdevice/ndi_dec.c
new file mode 100644
index 0000000..7454ca8
--- /dev/null
+++ b/libavdevice/ndi_dec.c
@@ -0,0 +1,499 @@
+/*
+ * NDI input
+ * Copyright (c) 2017 Maksym Veremeyenko
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavformat/avformat.h"
+#include "libavformat/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/imgutils.h"
+
+#include <pthread.h>
+//#include <semaphore.h>
+
+#include <Processing.NDI.Lib.h>
+
+typedef struct AVPacketQueue {
+    AVPacketList *first_pkt, *last_pkt;
+    int nb_packets;
+    unsigned long long size;
+    int abort_request;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond;
+    AVFormatContext *avctx;
+} AVPacketQueue;
+
+static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q)
+{
+    memset(q, 0, sizeof(AVPacketQueue));
+    pthread_mutex_init(&q->mutex, NULL);
+    pthread_cond_init(&q->cond, NULL);
+    q->avctx = avctx;
+}
+
+static void avpacket_queue_flush(AVPacketQueue *q)
+{
+    AVPacketList *pkt, *pkt1;
+
+    pthread_mutex_lock(&q->mutex);
+    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
+        pkt1 = pkt->next;
+        av_packet_unref(&pkt->pkt);
+        av_freep(&pkt);
+    }
+    q->last_pkt   = NULL;
+    q->first_pkt  = NULL;
+    q->nb_packets = 0;
+    q->size       = 0;
+    pthread_mutex_unlock(&q->mutex);
+}
+
+static void avpacket_queue_end(AVPacketQueue *q)
+{
+    avpacket_queue_flush(q);
+    pthread_mutex_destroy(&q->mutex);
+    pthread_cond_destroy(&q->cond);
+}
+
+static unsigned long long avpacket_queue_size(AVPacketQueue *q)
+{
+    unsigned long long size;
+    pthread_mutex_lock(&q->mutex);
+    size = q->size;
+    pthread_mutex_unlock(&q->mutex);
+    return size;
+}
+
+static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
+{
+    AVPacketList *pkt1;
+
+    // Drop Packet if queue size is > 1GB
+    if (avpacket_queue_size(q) >  1024 * 1024 * 1024 ) {
+        av_log(q->avctx, AV_LOG_WARNING,  "%s: input buffer overrun!\n", __FUNCTION__);
+        return -1;
+    }
+    /* duplicate the packet */
+    if (av_dup_packet(pkt) < 0) {
+        return -1;
+    }
+
+    pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));
+    if (!pkt1) {
+        return -1;
+    }
+    pkt1->pkt  = *pkt;
+    pkt1->next = NULL;
+
+    pthread_mutex_lock(&q->mutex);
+
+    if (!q->last_pkt) {
+        q->first_pkt = pkt1;
+    } else {
+        q->last_pkt->next = pkt1;
+    }
+
+    q->last_pkt = pkt1;
+    q->nb_packets++;
+    q->size += pkt1->pkt.size + sizeof(*pkt1);
+
+    pthread_cond_signal(&q->cond);
+
+    pthread_mutex_unlock(&q->mutex);
+    return 0;
+}
+
+static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)
+{
+    AVPacketList *pkt1;
+    int ret;
+
+    pthread_mutex_lock(&q->mutex);
+
+    for (;; ) {
+        pkt1 = q->first_pkt;
+        if (pkt1) {
+            q->first_pkt = pkt1->next;
+            if (!q->first_pkt) {
+                q->last_pkt = NULL;
+            }
+            q->nb_packets--;
+            q->size -= pkt1->pkt.size + sizeof(*pkt1);
+            *pkt     = pkt1->pkt;
+            av_free(pkt1);
+            ret = 1;
+            break;
+        } else if (!block) {
+            ret = 0;
+            break;
+        } else {
+            pthread_cond_wait(&q->cond, &q->mutex);
+        }
+    }
+    pthread_mutex_unlock(&q->mutex);
+    return ret;
+}
+
+struct ndi_ctx {
+    const AVClass *cclass;
+
+    void *ctx;
+
+    /* Options */
+    int find_sources;
+    int wait_sources;
+    int allow_video_fields;
+    int max_frames_probe;
+    int reference_level;
+
+    /* Runtime */
+    NDIlib_recv_create_t* recv;
+    pthread_t reader;
+    int f_started, f_stop, dropped;
+
+    /* Streams */
+    AVStream *video_st, *audio_st;
+    AVPacketQueue queue;
+    AVFormatContext *avctx;
+    int interlaced;
+};
+
+static int ndi_enqeue_video(struct ndi_ctx *ctx, NDIlib_video_frame_t *v)
+{
+    AVPacket pkt;
+    av_init_packet(&pkt);
+
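+    /* v->timecode is expressed in 100 ns units (1/10000000 s); rescale it to the stream time base. */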
+    pkt.dts = pkt.pts = av_rescale_q(v->timecode, (AVRational){1, 10000000LL}, ctx->video_st->time_base);
+    pkt.duration = av_rescale_q(1, (AVRational){v->frame_rate_D, v->frame_rate_N}, ctx->video_st->time_base);
+
+    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
+        __FUNCTION__, pkt.dts, pkt.duration, v->timecode);
+
+    pkt.flags       |= AV_PKT_FLAG_KEY;
+    pkt.stream_index = ctx->video_st->index;
+    pkt.data         = (uint8_t *)v->p_data;
+    pkt.size         = v->yres * v->line_stride_in_bytes;
+
+    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
+        ++ctx->dropped;
+    }
+
+//    NDIlib_recv_free_video(ctx->recv, v);
+
+    return 0;
+}
+
+static int ndi_enqeue_audio(struct ndi_ctx *ctx, NDIlib_audio_frame_t *a)
+{
+    AVPacket pkt;
+    NDIlib_audio_frame_interleaved_16s_t dst;
+
+    av_init_packet(&pkt);
+
+    pkt.dts = pkt.pts = av_rescale_q(a->timecode, (AVRational){1, 10000000LL}, ctx->audio_st->time_base);
+    pkt.duration = av_rescale_q(1, (AVRational){a->no_samples, a->sample_rate}, ctx->audio_st->time_base);
+
+    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
+        __FUNCTION__, pkt.dts, pkt.duration, a->timecode);
+
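+    /* Interleaved signed 16-bit PCM: 2 bytes per sample per channel. */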
+    pkt.size         = 2 * a->no_samples * a->no_channels;
+    pkt.data         = (uint8_t *)av_mallocz(pkt.size);
+    if (!pkt.data) {
+        NDIlib_recv_free_audio(ctx->recv, a);
+        return AVERROR(ENOMEM);
+    }
+
+    pkt.flags       |= AV_PKT_FLAG_KEY;
+    pkt.stream_index = ctx->audio_st->index;
+
+    dst.reference_level = 0;
+    dst.p_data = (short*)pkt.data;
+    NDIlib_util_audio_to_interleaved_16s(a, &dst);
+
+    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
+        ++ctx->dropped;
+    }
+
+    NDIlib_recv_free_audio(ctx->recv, a);
+
+    return 0;
+}
+
+static void* ndi_reader(void* p)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)p;
+
+    while (!ctx->f_stop)
+    {
+        NDIlib_video_frame_t v;
+        NDIlib_audio_frame_t a;
+        NDIlib_metadata_frame_t m;
+        NDIlib_frame_type_e t;
+
+        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
+
+        if (NDIlib_frame_type_video == t)
+            ndi_enqeue_video(ctx, &v);
+        else if (NDIlib_frame_type_audio == t)
+            ndi_enqeue_audio(ctx, &a);
+        else if (NDIlib_frame_type_metadata == t)
+            NDIlib_recv_free_metadata(ctx->recv, &m);
+    }
+
+    return NULL;
+}
+
+static int ndi_find_sources(AVFormatContext *avctx, const char* name, NDIlib_source_t *source_to_connect_to)
+{
+    unsigned int n, i;
+    int j = -1;
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    const NDIlib_source_t* ndi_srcs = NULL;
+    const NDIlib_find_create_t find_create_desc = { true, NULL };
+    NDIlib_find_instance_t ndi_find;
+
+    ndi_find = NDIlib_find_create2( &find_create_desc );
+    if (!ndi_find) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_find_create failed.\n");
+        return AVERROR(EIO);
+    }
+
+    NDIlib_find_wait_for_sources(ndi_find, ctx->wait_sources);
+    ndi_srcs = NDIlib_find_get_current_sources(ndi_find, &n);
+
+    av_log(avctx, AV_LOG_INFO, "Found %d NDI sources:\n", n);
+    for (i = 0; i < n; i++)
+    {
+        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n", ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
+        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
+            *source_to_connect_to = ndi_srcs[i];
+            j = i;
+        }
+    }
+
+    NDIlib_find_destroy(ndi_find);
+
+    return j;
+}
+
+static int ndi_read_header(AVFormatContext *avctx)
+{
+    int ret, i;
+    AVStream *st;
+    NDIlib_recv_create_t recv_create_desc;
+    const NDIlib_tally_t tally_state = { true, false };
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    ctx->avctx = avctx;
+
+    if (!NDIlib_initialize()) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
+        return AVERROR(EIO);
+    }
+
+    /* Find available sources. */
+    ret = ndi_find_sources(avctx, avctx->filename, &recv_create_desc.source_to_connect_to);
+    if (ctx->find_sources) {
+        return AVERROR_EXIT;
+    }
+    if (ret < 0) {
+        return AVERROR(ENOENT);
+    }
+
+    /* Create receiver description */
+    recv_create_desc.color_format = NDIlib_recv_color_format_e_UYVY_RGBA;
+    recv_create_desc.bandwidth = NDIlib_recv_bandwidth_highest;
+    recv_create_desc.allow_video_fields = ctx->allow_video_fields;
+
+    /* Create the receiver */
+    ctx->recv = NDIlib_recv_create2(&recv_create_desc);
+    if (!ctx->recv) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_recv_create2 failed.\n");
+        return AVERROR(EIO);
+    }
+
+    /* Set tally */
+    NDIlib_recv_set_tally(ctx->recv, &tally_state);
+
+    avpacket_queue_init(avctx, &ctx->queue);
+
+    /* Probe input */
+    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++)
+    {
+        NDIlib_video_frame_t v;
+        NDIlib_audio_frame_t a;
+        NDIlib_metadata_frame_t m;
+        NDIlib_frame_type_e t;
+
+        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
+
+        av_log(avctx, AV_LOG_DEBUG, "NDIlib_recv_capture[%d]=%d.\n", i, t);
+
+        if (NDIlib_frame_type_video == t) {
+            if (!ctx->video_st) {
+
+                st = avformat_new_stream(avctx, NULL);
+                if (!st) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
+                    ret = AVERROR(ENOMEM);
+                    goto error;
+                }
+
+                st->time_base.den               = v.frame_rate_N;
+                st->time_base.num               = v.frame_rate_D;
+                av_stream_set_r_frame_rate(st, av_make_q(st->time_base.den, st->time_base.num));
+                st->codecpar->codec_type        = AVMEDIA_TYPE_VIDEO;
+                st->codecpar->width             = v.xres;
+                st->codecpar->height            = v.yres;
+                st->codecpar->codec_id          = AV_CODEC_ID_RAWVIDEO;
+                st->codecpar->bit_rate          = av_rescale(v.xres * v.yres * 16, st->time_base.den, st->time_base.num);
+
+                if (NDIlib_FourCC_type_UYVY == v.FourCC || NDIlib_FourCC_type_UYVA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_UYVY422;
+                    st->codecpar->codec_tag     = MKTAG('U', 'Y', 'V', 'Y');
+                } else if (NDIlib_FourCC_type_BGRA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_BGRA;
+                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', 'A');
+                } else if (NDIlib_FourCC_type_BGRX == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_BGR0;
+                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', '0');
+                } else if (NDIlib_FourCC_type_RGBA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_RGBA;
+                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', 'A');
+                } else if (NDIlib_FourCC_type_RGBX == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_RGB0;
+                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', '0');
+                } else {
+                    av_log(avctx, AV_LOG_ERROR, "Unsupported video stream format, v.FourCC=%d\n", v.FourCC);
+                    ret = AVERROR(ENOTSUP);
+                    goto error;
+                }
+
+                ctx->interlaced = v.frame_format_type == NDIlib_frame_format_type_interleaved ? 1 : 0;
+
+                avpriv_set_pts_info(st, 64, v.frame_rate_D, v.frame_rate_N);
+
+                ctx->video_st = st;
+            }
+
+            ndi_enqeue_video(ctx, &v);
+        } else if (NDIlib_frame_type_audio == t) {
+            if (!ctx->audio_st) {
+
+                st = avformat_new_stream(avctx, NULL);
+                if (!st) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
+                    ret = AVERROR(ENOMEM);
+                    goto error;
+                }
+
+                st->codecpar->codec_type        = AVMEDIA_TYPE_AUDIO;
+                st->codecpar->codec_id          = AV_CODEC_ID_PCM_S16LE;
+                st->codecpar->sample_rate       = a.sample_rate;
+                st->codecpar->channels          = a.no_channels;
+
+                avpriv_set_pts_info(st, 64, 1, a.sample_rate);  /* 64 bits pts in us */
+
+                ctx->audio_st = st;
+            }
+
+            ndi_enqeue_audio(ctx, &a);
+        } else if (NDIlib_frame_type_metadata == t) {
+
+            NDIlib_recv_free_metadata(ctx->recv, &m);
+        }
+
+        if (ctx->video_st && ctx->audio_st)
+            break;
+    }
+
+    if (ctx->video_st || ctx->audio_st)
+    {
+        ctx->f_started = 1;
+        pthread_create(&ctx->reader, NULL, ndi_reader, ctx);
+    }
+
+error:
+    return ret;
+}
+
+static int ndi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;
+
+    avpacket_queue_get(&ctx->queue, pkt, 1);
+
+    if (frame && ctx->interlaced) {
+        frame->interlaced_frame = 1;
+        frame->top_field_first = 1;
+    }
+
+    return 0;
+
+}
+
+static int ndi_read_close(AVFormatContext *avctx)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    if (ctx->recv)
+    {
+        if (ctx->f_started)
+        {
+            ctx->f_stop = 1;
+            pthread_join(ctx->reader, NULL);
+        }
+
+        ctx->f_started = 0;
+        ctx->f_stop = 0;
+
+        avpacket_queue_end(&ctx->queue);
+        NDIlib_recv_destroy(ctx->recv);
+    }
+
+    return 0;
+}
+
+#define OFFSET(x) offsetof(struct ndi_ctx, x)
+#define DEC AV_OPT_FLAG_DECODING_PARAM
+
+static const AVOption options[] = {
+    { "find_sources", "Find available sources"  , OFFSET(find_sources), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, DEC },
+    { "wait_sources", "Time to wait until the number of online sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },
+    { "allow_video_fields", "When this flag is FALSE, all video that you receive will be progressive"  , OFFSET(allow_video_fields), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, DEC },
+    { "max_frames_probe", "Maximum frames for probe"  , OFFSET(max_frames_probe), AV_OPT_TYPE_INT, { .i64 = 25 }, 6, 100, DEC },
+    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, DEC | AV_OPT_FLAG_AUDIO_PARAM},
+    { NULL },
+};
+
+static const AVClass ndi_demuxer_class = {
+    .class_name = "NDI demuxer",
+    .item_name  = av_default_item_name,
+    .option     = options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
+};
+
+AVInputFormat ff_ndi_demuxer = {
+    .name           = "ndi",
+    .long_name      = NULL_IF_CONFIG_SMALL("NDI input"),
+    .flags          = AVFMT_NOFILE,
+    .priv_class     = &ndi_demuxer_class,
+    .priv_data_size = sizeof(struct ndi_ctx),
+    .read_header   = ndi_read_header,
+    .read_packet   = ndi_read_packet,
+    .read_close    = ndi_read_close,
+};
diff --git a/libavdevice/ndi_enc.c b/libavdevice/ndi_enc.c
new file mode 100644
index 0000000..8cbdb9e
--- /dev/null
+++ b/libavdevice/ndi_enc.c
@@ -0,0 +1,289 @@
+/*
+ * NDI output
+ * Copyright (c) 2017 Maksym Veremeyenko
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavformat/avformat.h"
+#include "libavformat/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/imgutils.h"
+
+#include <Processing.NDI.Lib.h>
+
+struct ndi_ctx {
+    const AVClass *cclass;
+
+    void *ctx;
+
+    /* Options */
+    int reference_level;
+
+    /* Streams present */
+    NDIlib_video_frame_t* video;
+    NDIlib_audio_frame_interleaved_16s_t* audio;
+
+    /* Status */
+    int64_t last_pts;
+
+    NDIlib_send_instance_t ndi_send;
+    AVFrame *last_avframe;
+};
+
+static int ndi_write_trailer(AVFormatContext *avctx)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    if (ctx->ndi_send)
+    {
+        if (ctx->last_avframe) {
+            NDIlib_send_send_video_async(ctx->ndi_send, NULL);
+            av_frame_free(&ctx->last_avframe);
+        }
+
+        NDIlib_send_destroy(ctx->ndi_send);
+    }
+
+    if (ctx->video)
+        av_freep(&ctx->video);
+
+    if (ctx->audio)
+        av_freep(&ctx->audio);
+
+    return 0;
+}
+
+static int ndi_write_video_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVFrame *avframe, *tmp = (AVFrame *)pkt->data;
+
+    if (tmp->format != AV_PIX_FMT_UYVY422 ||
+        tmp->width  != ctx->video->xres ||
+        tmp->height != ctx->video->yres) {
+        av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
+        av_log(avctx, AV_LOG_ERROR, "tmp->width=%d, tmp->height=%d, ctx->video->xres=%d, ctx->video->yres=%d\n", tmp->width, tmp->height, ctx->video->xres, ctx->video->yres);
+        return AVERROR(EINVAL);
+    }
+    avframe = av_frame_clone(tmp);
+    if (!avframe) {
+        av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
+        return AVERROR(EIO);
+    }
+
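+    /* Convert the packet pts to the 100 ns units used for NDI timecodes. */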
+    ctx->video->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000LL});
+
+    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
+        ? -avframe->linesize[0]
+        : avframe->linesize[0];
+    ctx->video->p_data = (avframe->linesize[0] < 0)
+        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
+        : (void *)(avframe->data[0]);
+
+    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
+        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
+
+    NDIlib_send_send_video_async(ctx->ndi_send, ctx->video);
+
+    if (ctx->last_avframe)
+        av_frame_free(&ctx->last_avframe);
+    ctx->last_avframe = avframe;
+
+    return 0;
+}
+
+static int ndi_write_audio_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    ctx->audio->p_data = (void*)pkt->data;
+    ctx->audio->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000});
+    ctx->audio->no_samples = pkt->size / (ctx->audio->no_channels << 1);
+
+    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
+        __FUNCTION__, pkt->pts, ctx->audio->timecode, st->time_base.num, st->time_base.den);
+
+    NDIlib_util_send_send_audio_interleaved_16s(ctx->ndi_send, ctx->audio);
+
+    return 0;
+}
+
+static int ndi_write_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVStream *st = avctx->streams[pkt->stream_index];
+
+    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
+
+    if      (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
+        return ndi_write_video_packet(avctx, st, pkt);
+    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
+        return ndi_write_audio_packet(avctx, st, pkt);
+
+    return AVERROR(EIO);
+}
+
+static int ndi_setup_audio(AVFormatContext *avctx, AVStream *st)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVCodecParameters *c = st->codecpar;
+
+    if (ctx->audio) {
+        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
+        return AVERROR(EINVAL);
+    }
+    if (c->sample_rate != 48000) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
+               " Only 48kHz is supported.\n");
+        return AVERROR(EINVAL);
+    }
+    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
+               " Only 2, 4 or 8 channels are supported.\n");
+        return AVERROR(EINVAL);
+    }
+
+    ctx->audio = (NDIlib_audio_frame_interleaved_16s_t *) av_mallocz(sizeof(NDIlib_audio_frame_interleaved_16s_t));
+    if (!ctx->audio)
+        return AVERROR(ENOMEM);
+
+    ctx->audio->sample_rate = c->sample_rate;
+    ctx->audio->no_channels = c->channels;
+    ctx->audio->reference_level = ctx->reference_level;
+
+    /* The device expects the sample rate to be fixed. */
+    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
+
+    return 0;
+}
+
+static int ndi_setup_video(AVFormatContext *avctx, AVStream *st)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVCodecParameters *c = st->codecpar;
+    AVRational display_aspect_ratio;
+
+    if (ctx->video) {
+        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    if (c->codec_id != AV_CODEC_ID_WRAPPED_AVFRAME) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported codec format!"
+               " Only AV_CODEC_ID_WRAPPED_AVFRAME is supported (-vcodec wrapped_avframe).\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    if (c->format != AV_PIX_FMT_UYVY422) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
+               " Only AV_PIX_FMT_UYVY422 is supported.\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    ctx->video = (NDIlib_video_frame_t *) av_mallocz(sizeof(NDIlib_video_frame_t));
+    if (!ctx->video)
+        return AVERROR(ENOMEM);
+
+    ctx->video->FourCC = NDIlib_FourCC_type_UYVY;
+    ctx->video->xres = c->width;
+    ctx->video->yres = c->height;
+    ctx->video->frame_rate_N = st->time_base.den;
+    ctx->video->frame_rate_D = st->time_base.num;
+    ctx->video->frame_format_type = AV_FIELD_PROGRESSIVE == c->field_order
+        ? NDIlib_frame_format_type_progressive
+        : NDIlib_frame_format_type_interleaved;
+
+    if (st->sample_aspect_ratio.num &&
+        av_cmp_q(st->sample_aspect_ratio, st->codecpar->sample_aspect_ratio)) {
+        av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
+                  st->codecpar->width  * (int64_t)st->sample_aspect_ratio.num,
+                  st->codecpar->height * (int64_t)st->sample_aspect_ratio.den,
+                  1024 * 1024);
+        ctx->video->picture_aspect_ratio = av_q2d(display_aspect_ratio);
+    }
+    else
+        ctx->video->picture_aspect_ratio = 16.0 / 9.0;
+
+    /* The device expects the framerate to be fixed. */
+    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
+
+    return 0;
+}
+
+static int ndi_write_header(AVFormatContext *avctx)
+{
+    int ret = 0;
+    unsigned int n;
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    const NDIlib_send_create_t ndi_send_desc = {avctx->filename, NULL, true, false};
+
+    /* check if streams compatible */
+    for (n = 0; n < avctx->nb_streams; n++) {
+        AVStream *st = avctx->streams[n];
+        AVCodecParameters *c = st->codecpar;
+        if        (c->codec_type == AVMEDIA_TYPE_AUDIO) {
+            if ((ret = ndi_setup_audio(avctx, st)))
+                goto error;
+        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
+            if ((ret = ndi_setup_video(avctx, st)))
+                goto error;
+        } else {
+            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
+            ret = AVERROR(EIO);
+            goto error;
+        }
+    }
+
+    if (!NDIlib_initialize()) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
+        ret = AVERROR(EIO);
+    } else {
+        ctx->ndi_send = NDIlib_send_create(&ndi_send_desc);
+        if (!ctx->ndi_send) {
+            av_log(avctx, AV_LOG_ERROR, "Failed to create NDI output %s\n", avctx->filename);
+            ret = AVERROR(EIO);
+        }
+    }
+
+error:
+    return ret;
+}
+
+#define OFFSET(x) offsetof(struct ndi_ctx, x)
+static const AVOption options[] = {
+
+    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_AUDIO_PARAM},
+    { NULL },
+};
+
+static const AVClass ndi_muxer_class = {
+    .class_name = "NDI muxer",
+    .item_name  = av_default_item_name,
+    .option     = options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT,
+};
+
+AVOutputFormat ff_ndi_muxer = {
+    .name           = "ndi",
+    .long_name      = NULL_IF_CONFIG_SMALL("NDI output"),
+    .audio_codec    = AV_CODEC_ID_PCM_S16LE,
+    .video_codec    = AV_CODEC_ID_WRAPPED_AVFRAME,
+    .subtitle_codec = AV_CODEC_ID_NONE,
+    .flags          = AVFMT_NOFILE,
+    .priv_class     = &ndi_muxer_class,
+    .priv_data_size = sizeof(struct ndi_ctx),
+    .write_header   = ndi_write_header,
+    .write_packet   = ndi_write_packet,
+    .write_trailer  = ndi_write_trailer,
+};
-- 
1.8.3.1

From 3e87f4f643ca589fd511da495b2e0c90d38e87a8 Mon Sep 17 00:00:00 2001
From: Maksym Veremeyenko <ve...@m1.tv>
Date: Sun, 23 Jul 2017 03:34:13 -0400
Subject: [PATCH] Implement NewTek NDI support

---
 configure                |   9 +
 doc/indevs.texi          |  54 +++++
 doc/outdevs.texi         |  36 ++++
 libavdevice/Makefile     |   2 +
 libavdevice/alldevices.c |   1 +
 libavdevice/ndi_dec.c    | 499 +++++++++++++++++++++++++++++++++++++++++++++++
 libavdevice/ndi_enc.c    | 289 +++++++++++++++++++++++++++
 7 files changed, 890 insertions(+)
 create mode 100644 libavdevice/ndi_dec.c
 create mode 100644 libavdevice/ndi_enc.c

diff --git a/configure b/configure
index 9b5789a..6cf283e 100755
--- a/configure
+++ b/configure
@@ -279,6 +279,7 @@ External library support:
   --enable-libzvbi         enable teletext support via libzvbi [no]
   --disable-lzma           disable lzma [autodetect]
   --enable-decklink        enable Blackmagic DeckLink I/O support [no]
+  --enable-ndi             enable NewTek NDI I/O support [no]
   --enable-mediacodec      enable Android MediaCodec support [no]
   --enable-netcdf          enable NetCDF, needed for sofalizer filter [no]
   --enable-openal          enable OpenAL 1.1 capture support [no]
@@ -1514,6 +1515,7 @@ EXTERNAL_LIBRARY_GPL_LIST="
 
 EXTERNAL_LIBRARY_NONFREE_LIST="
     decklink
+    ndi
     libfdk_aac
     openssl
 "
@@ -2994,6 +2996,10 @@ decklink_indev_deps="decklink pthreads"
 decklink_indev_extralibs="-lstdc++"
 decklink_outdev_deps="decklink pthreads"
 decklink_outdev_extralibs="-lstdc++"
+ndi_indev_deps="ndi pthreads"
+ndi_indev_extralibs="-lndi"
+ndi_outdev_deps="ndi pthreads"
+ndi_outdev_extralibs="-lndi"
 dshow_indev_deps="IBaseFilter"
 dshow_indev_extralibs="-lpsapi -lole32 -lstrmiids -luuid -loleaut32 -lshlwapi"
 dv1394_indev_deps="dv1394"
@@ -5507,6 +5513,8 @@ avisynth_demuxer_extralibs='$ldl'
 cuda_extralibs='$ldl'
 decklink_outdev_extralibs="$decklink_outdev_extralibs $ldl"
 decklink_indev_extralibs="$decklink_indev_extralibs $ldl"
+ndi_outdev_extralibs="$ndi_outdev_extralibs $ldl"
+ndi_indev_extralibs="$ndi_indev_extralibs $ldl"
 frei0r_filter_extralibs='$ldl'
 frei0r_src_filter_extralibs='$ldl'
 ladspa_filter_extralibs='$ldl'
@@ -5764,6 +5772,7 @@ enabled coreimage_filter  && { check_header_objcc QuartzCore/CoreImage.h || disa
 enabled coreimagesrc_filter && { check_header_objcc QuartzCore/CoreImage.h || disable coreimagesrc_filter; }
 enabled decklink          && { { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; } &&
                                { check_cpp_condition DeckLinkAPIVersion.h "BLACKMAGIC_DECKLINK_API_VERSION >= 0x0a060100" || die "ERROR: Decklink API version must be >= 10.6.1."; } }
+enabled ndi               && { check_header Processing.NDI.Lib.h || die "ERROR: Processing.NDI.Lib.h header not found"; }
 enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
 enabled gmp               && require gmp gmp.h mpz_export -lgmp
 enabled gnutls            && require_pkg_config gnutls gnutls/gnutls.h gnutls_global_init
diff --git a/doc/indevs.texi b/doc/indevs.texi
index 51c304f..72baabd 100644
--- a/doc/indevs.texi
+++ b/doc/indevs.texi
@@ -321,6 +321,60 @@ ffmpeg -channels 16 -format_code Hi50 -f decklink -i 'UltraStudio Mini Recorder'
 
 @end itemize
 
+@section ndi
+
+The ndi input device provides capture capabilities for NDI (Network
+Device Interface), a standard created by NewTek.
+
+To enable this input device, you need the NDI SDK and you
+need to configure with the appropriate @code{--extra-cflags}
+and @code{--extra-ldflags}.
+
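+For example, assuming the NDI SDK headers and libraries were installed under
+@file{/usr/local/ndi} (adjust the paths to your installation), a configure
+invocation might look like:
+@example
+./configure --enable-nonfree --enable-ndi \
+            --extra-cflags=-I/usr/local/ndi/include \
+            --extra-ldflags=-L/usr/local/ndi/lib
+@end example
+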
+@subsection Options
+
+@table @option
+
+@item find_sources
+If set to @option{true}, print a list of found/available NDI sources and exit.
+Defaults to @option{false}.
+
+@item wait_sources
+Time to wait (in milliseconds) until the number of online sources has changed.
+Defaults to @option{500}.
+
+@item allow_video_fields
+If set to @option{false}, all received video will be progressive.
+Defaults to @option{true}.
+
+@item max_frames_probe
+Maximum number of frames used to probe the input format.
+Defaults to @option{25}.
+
+@item reference_level
+The audio reference level in dB. This specifies how many dB above the
+reference level (+4dBU) is the full range of 16 bit audio.
+Defaults to @option{0}.
+
+@end table
+
+@subsection Examples
+
+@itemize
+
+@item
+List input devices:
+@example
+ffmpeg -f ndi -find_sources 1 -i foo
+@end example
+
+@item
+Restream to NDI:
+@example
+ffmpeg -f ndi -i "DEV-5.INTERNAL.M1STEREO.TV (NDI_SOURCE_NAME_1)" -acodec copy -vcodec wrapped_avframe -f ndi -y NDI_SOURCE_NAME_2
+@end example
+
+@end itemize
+
 @section dshow
 
 Windows DirectShow input device.
diff --git a/doc/outdevs.texi b/doc/outdevs.texi
index df41cc8..7830cc7 100644
--- a/doc/outdevs.texi
+++ b/doc/outdevs.texi
@@ -182,6 +182,42 @@ ffmpeg -i test.avi -f decklink -pix_fmt uyvy422 -s 720x486 -r 24000/1001 'DeckLi
 
 @end itemize
 
+@section ndi
+
+The ndi output device provides playback capabilities over NDI (Network
+Device Interface), a standard created by NewTek.
+
+To enable this output device, you need the NDI SDK and you
+need to configure with the appropriate @code{--extra-cflags}
+and @code{--extra-ldflags}.
+
+NDI is very picky about the formats it supports. The pixel format must be
+uyvy422 and the video codec must be wrapped_avframe; frame rate, field order
+and video size are taken from the stream being written. The audio sample rate
+must be 48 kHz, with 2, 4 or 8 channels of 16-bit PCM.
+
+@subsection Options
+
+@table @option
+
+@item reference_level
+The audio reference level in dB. This specifies how many dB above the
+reference level (+4dBU) is the full range of 16 bit audio.
+Defaults to @option{0}.
+
+@end table
+
+@subsection Examples
+
+@itemize
+
+@item
+Play video clip:
+@example
+ffmpeg -i "udp://@239.1.1.1:10480?fifo_size=1000000&overrun_nonfatal=1" -vf "scale=720:576,fps=fps=25,setdar=dar=16/9,format=pix_fmts=uyvy422" -vcodec wrapped_avframe -f ndi NEW_NDI1
+@end example
+
+@end itemize
+
 @section fbdev
 
 Linux framebuffer output device.
diff --git a/libavdevice/Makefile b/libavdevice/Makefile
index 78c42e6..dc26bc5 100644
--- a/libavdevice/Makefile
+++ b/libavdevice/Makefile
@@ -19,6 +19,8 @@ OBJS-$(CONFIG_BKTR_INDEV)                += bktr.o
 OBJS-$(CONFIG_CACA_OUTDEV)               += caca.o
 OBJS-$(CONFIG_DECKLINK_OUTDEV)           += decklink_enc.o decklink_enc_c.o decklink_common.o
 OBJS-$(CONFIG_DECKLINK_INDEV)            += decklink_dec.o decklink_dec_c.o decklink_common.o
+OBJS-$(CONFIG_NDI_OUTDEV)                += ndi_enc.o
+OBJS-$(CONFIG_NDI_INDEV)                 += ndi_dec.o
 OBJS-$(CONFIG_DSHOW_INDEV)               += dshow_crossbar.o dshow.o dshow_enummediatypes.o \
                                             dshow_enumpins.o dshow_filter.o \
                                             dshow_pin.o dshow_common.o
diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c
index 280a260..d968776 100644
--- a/libavdevice/alldevices.c
+++ b/libavdevice/alldevices.c
@@ -46,6 +46,7 @@ static void register_all(void)
     REGISTER_INDEV   (BKTR,             bktr);
     REGISTER_OUTDEV  (CACA,             caca);
     REGISTER_INOUTDEV(DECKLINK,         decklink);
+    REGISTER_INOUTDEV(NDI,              ndi);
     REGISTER_INDEV   (DSHOW,            dshow);
     REGISTER_INDEV   (DV1394,           dv1394);
     REGISTER_INOUTDEV(FBDEV,            fbdev);
diff --git a/libavdevice/ndi_dec.c b/libavdevice/ndi_dec.c
new file mode 100644
index 0000000..7454ca8
--- /dev/null
+++ b/libavdevice/ndi_dec.c
@@ -0,0 +1,499 @@
+/*
+ * NDI input
+ * Copyright (c) 2017 Maksym Veremeyenko
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavformat/avformat.h"
+#include "libavformat/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/imgutils.h"
+
+#include <pthread.h>
+//#include <semaphore.h>
+
+#include <Processing.NDI.Lib.h>
+
+typedef struct AVPacketQueue {
+    AVPacketList *first_pkt, *last_pkt;
+    int nb_packets;
+    unsigned long long size;
+    int abort_request;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond;
+    AVFormatContext *avctx;
+} AVPacketQueue;
+
+static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q)
+{
+    memset(q, 0, sizeof(AVPacketQueue));
+    pthread_mutex_init(&q->mutex, NULL);
+    pthread_cond_init(&q->cond, NULL);
+    q->avctx = avctx;
+}
+
+static void avpacket_queue_flush(AVPacketQueue *q)
+{
+    AVPacketList *pkt, *pkt1;
+
+    pthread_mutex_lock(&q->mutex);
+    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
+        pkt1 = pkt->next;
+        av_packet_unref(&pkt->pkt);
+        av_freep(&pkt);
+    }
+    q->last_pkt   = NULL;
+    q->first_pkt  = NULL;
+    q->nb_packets = 0;
+    q->size       = 0;
+    pthread_mutex_unlock(&q->mutex);
+}
+
+static void avpacket_queue_end(AVPacketQueue *q)
+{
+    avpacket_queue_flush(q);
+    pthread_mutex_destroy(&q->mutex);
+    pthread_cond_destroy(&q->cond);
+}
+
+static unsigned long long avpacket_queue_size(AVPacketQueue *q)
+{
+    unsigned long long size;
+    pthread_mutex_lock(&q->mutex);
+    size = q->size;
+    pthread_mutex_unlock(&q->mutex);
+    return size;
+}
+
+static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
+{
+    AVPacketList *pkt1;
+
+    // Drop Packet if queue size is > 1GB
+    if (avpacket_queue_size(q) >  1024 * 1024 * 1024 ) {
+        av_log(q->avctx, AV_LOG_WARNING,  "%s: input buffer overrun!\n", __FUNCTION__);
+        return -1;
+    }
+    /* duplicate the packet */
+    if (av_dup_packet(pkt) < 0) {
+        return -1;
+    }
+
+    pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));
+    if (!pkt1) {
+        return -1;
+    }
+    pkt1->pkt  = *pkt;
+    pkt1->next = NULL;
+
+    pthread_mutex_lock(&q->mutex);
+
+    if (!q->last_pkt) {
+        q->first_pkt = pkt1;
+    } else {
+        q->last_pkt->next = pkt1;
+    }
+
+    q->last_pkt = pkt1;
+    q->nb_packets++;
+    q->size += pkt1->pkt.size + sizeof(*pkt1);
+
+    pthread_cond_signal(&q->cond);
+
+    pthread_mutex_unlock(&q->mutex);
+    return 0;
+}
+
+static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)
+{
+    AVPacketList *pkt1;
+    int ret;
+
+    pthread_mutex_lock(&q->mutex);
+
+    for (;; ) {
+        pkt1 = q->first_pkt;
+        if (pkt1) {
+            q->first_pkt = pkt1->next;
+            if (!q->first_pkt) {
+                q->last_pkt = NULL;
+            }
+            q->nb_packets--;
+            q->size -= pkt1->pkt.size + sizeof(*pkt1);
+            *pkt     = pkt1->pkt;
+            av_free(pkt1);
+            ret = 1;
+            break;
+        } else if (!block) {
+            ret = 0;
+            break;
+        } else {
+            pthread_cond_wait(&q->cond, &q->mutex);
+        }
+    }
+    pthread_mutex_unlock(&q->mutex);
+    return ret;
+}
+
+struct ndi_ctx {
+    const AVClass *cclass;
+
+    void *ctx;
+
+    /* Options */
+    int find_sources;
+    int wait_sources;
+    int allow_video_fields;
+    int max_frames_probe;
+    int reference_level;
+
+    /* Runtime */
+    NDIlib_recv_create_t* recv;
+    pthread_t reader;
+    int f_started, f_stop, dropped;
+
+    /* Streams */
+    AVStream *video_st, *audio_st;
+    AVPacketQueue queue;
+    AVFormatContext *avctx;
+    int interlaced;
+};
+
+static int ndi_enqeue_video(struct ndi_ctx *ctx, NDIlib_video_frame_t *v)
+{
+    AVPacket pkt;
+    av_init_packet(&pkt);
+
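+    /* v->timecode is expressed in 100 ns units (1/10000000 s); rescale it to the stream time base. */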
+    pkt.dts = pkt.pts = av_rescale_q(v->timecode, (AVRational){1, 10000000LL}, ctx->video_st->time_base);
+    pkt.duration = av_rescale_q(1, (AVRational){v->frame_rate_D, v->frame_rate_N}, ctx->video_st->time_base);
+
+    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
+        __FUNCTION__, pkt.dts, pkt.duration, v->timecode);
+
+    pkt.flags       |= AV_PKT_FLAG_KEY;
+    pkt.stream_index = ctx->video_st->index;
+    pkt.data         = (uint8_t *)v->p_data;
+    pkt.size         = v->yres * v->line_stride_in_bytes;
+
+    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
+        ++ctx->dropped;
+    }
+
+//    NDIlib_recv_free_video(ctx->recv, v);
+
+    return 0;
+}
+
+static int ndi_enqeue_audio(struct ndi_ctx *ctx, NDIlib_audio_frame_t *a)
+{
+    AVPacket pkt;
+    NDIlib_audio_frame_interleaved_16s_t dst;
+
+    av_init_packet(&pkt);
+
+    pkt.dts = pkt.pts = av_rescale_q(a->timecode, (AVRational){1, 10000000LL}, ctx->audio_st->time_base);
+    pkt.duration = av_rescale_q(1, (AVRational){a->no_samples, a->sample_rate}, ctx->audio_st->time_base);
+
+    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
+        __FUNCTION__, pkt.dts, pkt.duration, a->timecode);
+
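+    /* Interleaved signed 16-bit PCM: 2 bytes per sample per channel. */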
+    pkt.size         = 2 * a->no_samples * a->no_channels;
+    pkt.data         = (uint8_t *)av_mallocz(pkt.size);
+    if (!pkt.data) {
+        NDIlib_recv_free_audio(ctx->recv, a);
+        return AVERROR(ENOMEM);
+    }
+
+    pkt.flags       |= AV_PKT_FLAG_KEY;
+    pkt.stream_index = ctx->audio_st->index;
+
+    dst.reference_level = 0;
+    dst.p_data = (short*)pkt.data;
+    NDIlib_util_audio_to_interleaved_16s(a, &dst);
+
+    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
+        ++ctx->dropped;
+    }
+
+    NDIlib_recv_free_audio(ctx->recv, a);
+
+    return 0;
+}
+
+static void* ndi_reader(void* p)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)p;
+
+    while (!ctx->f_stop)
+    {
+        NDIlib_video_frame_t v;
+        NDIlib_audio_frame_t a;
+        NDIlib_metadata_frame_t m;
+        NDIlib_frame_type_e t;
+
+        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
+
+        if (NDIlib_frame_type_video == t)
+            ndi_enqeue_video(ctx, &v);
+        else if (NDIlib_frame_type_audio == t)
+            ndi_enqeue_audio(ctx, &a);
+        else if (NDIlib_frame_type_metadata == t)
+            NDIlib_recv_free_metadata(ctx->recv, &m);
+    }
+
+    return NULL;
+}
+
+static int ndi_find_sources(AVFormatContext *avctx, const char* name, NDIlib_source_t *source_to_connect_to)
+{
+    unsigned int n, i;
+    int j = -1;
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    const NDIlib_source_t* ndi_srcs = NULL;
+    const NDIlib_find_create_t find_create_desc = { true, NULL };
+    NDIlib_find_instance_t ndi_find;
+
+    ndi_find = NDIlib_find_create2( &find_create_desc );
+    if (!ndi_find) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_find_create failed.\n");
+        return AVERROR(EIO);
+    }
+
+    NDIlib_find_wait_for_sources(ndi_find, ctx->wait_sources);
+    ndi_srcs = NDIlib_find_get_current_sources(ndi_find, &n);
+
+    av_log(avctx, AV_LOG_INFO, "Found %d NDI sources:\n", n);
+    for (i = 0; i < n; i++)
+    {
+        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n", ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
+        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
+            *source_to_connect_to = ndi_srcs[i];
+            j = i;
+        }
+    }
+
+    NDIlib_find_destroy(ndi_find);
+
+    return j;
+}
+
+static int ndi_read_header(AVFormatContext *avctx)
+{
+    int ret, i;
+    AVStream *st;
+    NDIlib_recv_create_t recv_create_desc;
+    const NDIlib_tally_t tally_state = { true, false };
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    ctx->avctx = avctx;
+
+    if (!NDIlib_initialize()) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
+        return AVERROR(EIO);
+    }
+
+    /* Find available sources. */
+    ret = ndi_find_sources(avctx, avctx->filename, &recv_create_desc.source_to_connect_to);
+    if (ctx->find_sources) {
+        return AVERROR_EXIT;
+    }
+    if (ret < 0) {
+        return AVERROR(ENOENT);
+    }
+
+    /* Create receiver description */
+    recv_create_desc.color_format = NDIlib_recv_color_format_e_UYVY_RGBA;
+    recv_create_desc.bandwidth = NDIlib_recv_bandwidth_highest;
+    recv_create_desc.allow_video_fields = ctx->allow_video_fields;
+
+    /* Create the receiver */
+    ctx->recv = NDIlib_recv_create2(&recv_create_desc);
+    if (!ctx->recv) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_recv_create2 failed.\n");
+        return AVERROR(EIO);
+    }
+
+    /* Set tally */
+    NDIlib_recv_set_tally(ctx->recv, &tally_state);
+
+    avpacket_queue_init(avctx, &ctx->queue);
+
+    /* Probe input */
+    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++)
+    {
+        NDIlib_video_frame_t v;
+        NDIlib_audio_frame_t a;
+        NDIlib_metadata_frame_t m;
+        NDIlib_frame_type_e t;
+
+        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
+
+        av_log(avctx, AV_LOG_DEBUG, "NDIlib_recv_capture[%d]=%d.\n", i, t);
+
+        if (NDIlib_frame_type_video == t) {
+            if (!ctx->video_st) {
+
+                st = avformat_new_stream(avctx, NULL);
+                if (!st) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
+                    ret = AVERROR(ENOMEM);
+                    goto error;
+                }
+
+                st->time_base.den               = v.frame_rate_N;
+                st->time_base.num               = v.frame_rate_D;
+                av_stream_set_r_frame_rate(st, av_make_q(st->time_base.den, st->time_base.num));
+                st->codecpar->codec_type        = AVMEDIA_TYPE_VIDEO;
+                st->codecpar->width             = v.xres;
+                st->codecpar->height            = v.yres;
+                st->codecpar->codec_id          = AV_CODEC_ID_RAWVIDEO;
+                st->codecpar->bit_rate          = av_rescale(v.xres * v.yres * 16, st->time_base.den, st->time_base.num);
+
+                if (NDIlib_FourCC_type_UYVY == v.FourCC || NDIlib_FourCC_type_UYVA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_UYVY422;
+                    st->codecpar->codec_tag     = MKTAG('U', 'Y', 'V', 'Y');
+                } else if (NDIlib_FourCC_type_BGRA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_BGRA;
+                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', 'A');
+                } else if (NDIlib_FourCC_type_BGRX == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_BGR0;
+                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', '0');
+                } else if (NDIlib_FourCC_type_RGBA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_RGBA;
+                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', 'A');
+                } else if (NDIlib_FourCC_type_RGBX == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_RGB0;
+                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', '0');
+                } else {
+                    av_log(avctx, AV_LOG_ERROR, "Unsupported video stream format, v.FourCC=%d\n", v.FourCC);
+                    ret = AVERROR(ENOTSUP);
+                    goto error;
+                }
+
+                ctx->interlaced = v.frame_format_type == NDIlib_frame_format_type_interleaved ? 1 : 0;
+
+                avpriv_set_pts_info(st, 64, v.frame_rate_D, v.frame_rate_N);
+
+                ctx->video_st = st;
+            }
+
+            ndi_enqeue_video(ctx, &v);
+        } else if (NDIlib_frame_type_audio == t) {
+            if (!ctx->audio_st) {
+
+                st = avformat_new_stream(avctx, NULL);
+                if (!st) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
+                    ret = AVERROR(ENOMEM);
+                    goto error;
+                }
+
+                st->codecpar->codec_type        = AVMEDIA_TYPE_AUDIO;
+                st->codecpar->codec_id          = AV_CODEC_ID_PCM_S16LE;
+                st->codecpar->sample_rate       = a.sample_rate;
+                st->codecpar->channels          = a.no_channels;
+
+                avpriv_set_pts_info(st, 64, 1, a.sample_rate);  /* 64 bits pts in us */
+
+                ctx->audio_st = st;
+            }
+
+            ndi_enqeue_audio(ctx, &a);
+        } else if (NDIlib_frame_type_metadata == t) {
+
+            NDIlib_recv_free_metadata(ctx->recv, &m);
+        }
+
+        if (ctx->video_st && ctx->audio_st)
+            break;
+    }
+
+    if (ctx->video_st || ctx->audio_st)
+    {
+        ctx->f_started = 1;
+        pthread_create(&ctx->reader, NULL, ndi_reader, ctx);
+    }
+
+error:
+    return ret;
+}
+
+static int ndi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;
+
+    avpacket_queue_get(&ctx->queue, pkt, 1);
+
+    if (frame && ctx->interlaced) {
+        frame->interlaced_frame = 1;
+        frame->top_field_first = 1;
+    }
+
+    return 0;
+
+}
+
+static int ndi_read_close(AVFormatContext *avctx)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    if (ctx->recv)
+    {
+        if (ctx->f_started)
+        {
+            ctx->f_stop = 1;
+            pthread_join(ctx->reader, NULL);
+        }
+
+        ctx->f_started = 0;
+        ctx->f_stop = 0;
+
+        avpacket_queue_end(&ctx->queue);
+        NDIlib_recv_destroy(ctx->recv);
+    }
+
+    return 0;
+}
+
+#define OFFSET(x) offsetof(struct ndi_ctx, x)
+#define DEC AV_OPT_FLAG_DECODING_PARAM
+
+static const AVOption options[] = {
+    { "find_sources", "Find available sources"  , OFFSET(find_sources), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, DEC },
+    { "wait_sources", "Time to wait until the number of online sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },
+    { "allow_video_fields", "When this flag is FALSE, all video that you receive will be progressive"  , OFFSET(allow_video_fields), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, DEC },
+    { "max_frames_probe", "Maximum frames for probe"  , OFFSET(max_frames_probe), AV_OPT_TYPE_INT, { .i64 = 25 }, 6, 100, DEC },
+    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, DEC | AV_OPT_FLAG_AUDIO_PARAM},
+    { NULL },
+};
+
+static const AVClass ndi_demuxer_class = {
+    .class_name = "NDI demuxer",
+    .item_name  = av_default_item_name,
+    .option     = options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
+};
+
+AVInputFormat ff_ndi_demuxer = {
+    .name           = "ndi",
+    .long_name      = NULL_IF_CONFIG_SMALL("NDI input"),
+    .flags          = AVFMT_NOFILE,
+    .priv_class     = &ndi_demuxer_class,
+    .priv_data_size = sizeof(struct ndi_ctx),
+    .read_header    = ndi_read_header,
+    .read_packet    = ndi_read_packet,
+    .read_close     = ndi_read_close,
+};
diff --git a/libavdevice/ndi_enc.c b/libavdevice/ndi_enc.c
new file mode 100644
index 0000000..8cbdb9e
--- /dev/null
+++ b/libavdevice/ndi_enc.c
@@ -0,0 +1,289 @@
+/*
+ * NDI output
+ * Copyright (c) 2017 Maksym Veremeyenko
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavformat/avformat.h"
+#include "libavformat/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/imgutils.h"
+
+#include <Processing.NDI.Lib.h>
+
+struct ndi_ctx {
+    const AVClass *cclass;
+
+    void *ctx;
+
+    /* Options */
+    int reference_level;
+
+    /* Streams present */
+    NDIlib_video_frame_t* video;
+    NDIlib_audio_frame_interleaved_16s_t* audio;
+
+    /* Status */
+    int64_t last_pts;
+
+    NDIlib_send_instance_t ndi_send;
+    AVFrame *last_avframe;
+};
+
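+/* Flush the last asynchronously sent video frame, then destroy the sender and
+ * free the frame descriptors. */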
+static int ndi_write_trailer(AVFormatContext *avctx)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    if (ctx->ndi_send) {
+        if (ctx->last_avframe) {
+            NDIlib_send_send_video_async(ctx->ndi_send, NULL);
+            av_frame_free(&ctx->last_avframe);
+        }
+
+        NDIlib_send_destroy(ctx->ndi_send);
+    }
+
+    if (ctx->video)
+        av_freep(&ctx->video);
+
+    if (ctx->audio)
+        av_freep(&ctx->audio);
+
+    return 0;
+}
+
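+/* Send one wrapped AVFrame as NDI video. Frames are submitted with the
+ * asynchronous API, so a cloned reference is kept until the next submission
+ * (or the trailer) has flushed it. */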
+static int ndi_write_video_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVFrame *avframe, *tmp = (AVFrame *)pkt->data;
+
+    if (tmp->format != AV_PIX_FMT_UYVY422 ||
+        tmp->width  != ctx->video->xres ||
+        tmp->height != ctx->video->yres) {
+        av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
+        av_log(avctx, AV_LOG_ERROR, "tmp->width=%d, tmp->height=%d, ctx->video->xres=%d, ctx->video->yres=%d\n", tmp->width, tmp->height, ctx->video->xres, ctx->video->yres);
+        return AVERROR(EINVAL);
+    }
+    avframe = av_frame_clone(tmp);
+    if (!avframe) {
+        av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
+        return AVERROR(ENOMEM);
+    }
+
+    ctx->video->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000});
+
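+    /* Pass a positive stride; for frames stored bottom-up (negative linesize),
+     * p_data is rebased to the start of the underlying buffer. */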
+    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
+        ? -avframe->linesize[0]
+        : avframe->linesize[0];
+    ctx->video->p_data = (avframe->linesize[0] < 0)
+        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
+        : (void *)(avframe->data[0]);
+
+    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
+        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
+
+    NDIlib_send_send_video_async(ctx->ndi_send, ctx->video);
+
+    if (ctx->last_avframe)
+        av_frame_free(&ctx->last_avframe);
+    ctx->last_avframe = avframe;
+
+    return 0;
+}
+
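+/* Send one packet of interleaved 16-bit PCM audio through the NDI utility helper. */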
+static int ndi_write_audio_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    ctx->audio->p_data = (void*)pkt->data;
+    ctx->audio->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000});
+    ctx->audio->no_samples = pkt->size / (ctx->audio->no_channels << 1);
+
+    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
+        __FUNCTION__, pkt->pts, ctx->audio->timecode, st->time_base.num, st->time_base.den);
+
+    NDIlib_util_send_send_audio_interleaved_16s(ctx->ndi_send, ctx->audio);
+
+    return 0;
+}
+
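+/* Dispatch the packet to the video or audio sender according to its stream type. */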
+static int ndi_write_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVStream *st = avctx->streams[pkt->stream_index];
+
+    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
+
+    if      (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
+        return ndi_write_video_packet(avctx, st, pkt);
+    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
+        return ndi_write_audio_packet(avctx, st, pkt);
+
+    return AVERROR(EIO);
+}
+
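+/* Validate the audio stream parameters and allocate the NDI audio frame
+ * descriptor that is reused for every packet. */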
+static int ndi_setup_audio(AVFormatContext *avctx, AVStream *st)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVCodecParameters *c = st->codecpar;
+
+    if (ctx->audio) {
+        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
+        return AVERROR(EINVAL);
+    }
+    if (c->sample_rate != 48000) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
+               " Only 48kHz is supported.\n");
+        return AVERROR(EINVAL);
+    }
+    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
+               " Only 2, 4 or 8 channels are supported.\n");
+        return AVERROR(EINVAL);
+    }
+
+    ctx->audio = (NDIlib_audio_frame_interleaved_16s_t *) av_mallocz(sizeof(NDIlib_audio_frame_interleaved_16s_t));
+    if (!ctx->audio)
+        return AVERROR(ENOMEM);
+
+    ctx->audio->sample_rate = c->sample_rate;
+    ctx->audio->no_channels = c->channels;
+    ctx->audio->reference_level = ctx->reference_level;
+
+    /* The device expects the sample rate to be fixed. */
+    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
+
+    return 0;
+}
+
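+/* Validate the video stream parameters and allocate the NDI video frame
+ * descriptor that is reused for every packet. */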
+static int ndi_setup_video(AVFormatContext *avctx, AVStream *st)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVCodecParameters *c = st->codecpar;
+    AVRational display_aspect_ratio;
+
+    if (ctx->video) {
+        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    if (c->codec_id != AV_CODEC_ID_WRAPPED_AVFRAME) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported codec format!"
+               " Only AV_CODEC_ID_WRAPPED_AVFRAME is supported (-vcodec wrapped_avframe).\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    if (c->format != AV_PIX_FMT_UYVY422) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
+               " Only AV_PIX_FMT_UYVY422 is supported.\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    ctx->video = (NDIlib_video_frame_t *) av_mallocz(sizeof(NDIlib_video_frame_t));
+    if (!ctx->video)
+        return AVERROR(ENOMEM);
+
+    ctx->video->FourCC = NDIlib_FourCC_type_UYVY;
+    ctx->video->xres = c->width;
+    ctx->video->yres = c->height;
+    ctx->video->frame_rate_N = st->time_base.den;
+    ctx->video->frame_rate_D = st->time_base.num;
+    ctx->video->frame_format_type = AV_FIELD_PROGRESSIVE == c->field_order
+        ? NDIlib_frame_format_type_progressive
+        : NDIlib_frame_format_type_interleaved;
+
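+    /* Prefer a display aspect ratio derived from the stream SAR; otherwise
+     * fall back to 16:9. */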
+    if (st->sample_aspect_ratio.num &&
+        av_cmp_q(st->sample_aspect_ratio, st->codecpar->sample_aspect_ratio)) {
+        av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
+                  st->codecpar->width  * (int64_t)st->sample_aspect_ratio.num,
+                  st->codecpar->height * (int64_t)st->sample_aspect_ratio.den,
+                  1024 * 1024);
+        ctx->video->picture_aspect_ratio = av_q2d(display_aspect_ratio);
+    } else
+        ctx->video->picture_aspect_ratio = 16.0 / 9.0;
+
+    /* The device expects the framerate to be fixed. */
+    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
+
+    return 0;
+}
+
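+/* Verify stream compatibility, initialise the NDI runtime and create a sender
+ * named after the output "filename". */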
+static int ndi_write_header(AVFormatContext *avctx)
+{
+    int ret = 0;
+    unsigned int n;
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    const NDIlib_send_create_t ndi_send_desc = {avctx->filename, NULL, true, false};
+
+    /* check if streams compatible */
+    for (n = 0; n < avctx->nb_streams; n++) {
+        AVStream *st = avctx->streams[n];
+        AVCodecParameters *c = st->codecpar;
+        if        (c->codec_type == AVMEDIA_TYPE_AUDIO) {
+            if ((ret = ndi_setup_audio(avctx, st)))
+                goto error;
+        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
+            if ((ret = ndi_setup_video(avctx, st)))
+                goto error;
+        } else {
+            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
+            ret = AVERROR(EIO);
+            goto error;
+        }
+    }
+
+    if (!NDIlib_initialize()) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
+        ret = AVERROR_EXTERNAL;
+        goto error;
+    }
+
+    ctx->ndi_send = NDIlib_send_create(&ndi_send_desc);
+    if (!ctx->ndi_send) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to create NDI output %s\n", avctx->filename);
+        ret = AVERROR_EXTERNAL;
+    }
+
+error:
+    return ret;
+}
+
+#define OFFSET(x) offsetof(struct ndi_ctx, x)
+static const AVOption options[] = {
+    { "reference_level", "The audio reference level in dB", OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_AUDIO_PARAM },
+    { NULL },
+};
+
+static const AVClass ndi_muxer_class = {
+    .class_name = "NDI muxer",
+    .item_name  = av_default_item_name,
+    .option     = options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT,
+};
+
+AVOutputFormat ff_ndi_muxer = {
+    .name           = "ndi",
+    .long_name      = NULL_IF_CONFIG_SMALL("NDI output"),
+    .audio_codec    = AV_CODEC_ID_PCM_S16LE,
+    .video_codec    = AV_CODEC_ID_WRAPPED_AVFRAME,
+    .subtitle_codec = AV_CODEC_ID_NONE,
+    .flags          = AVFMT_NOFILE,
+    .priv_class     = &ndi_muxer_class,
+    .priv_data_size = sizeof(struct ndi_ctx),
+    .write_header   = ndi_write_header,
+    .write_packet   = ndi_write_packet,
+    .write_trailer  = ndi_write_trailer,
+};
-- 
1.8.3.1

_______________________________________________
ffmpeg-devel mailing list
ffmpeg-devel@ffmpeg.org
http://ffmpeg.org/mailman/listinfo/ffmpeg-devel
