diff mbox

[FFmpeg-devel] Implement NewTek NDI support

Message ID ee692964-cdf2-2fb9-2461-1a0371e5e1f3@m1stereo.tv
State Superseded
Headers show

Commit Message

Maksym Veremeyenko July 23, 2017, 8:08 a.m. UTC
Hi,

attached are patches for HEAD and n3.3 that implement NewTek NDI support, a
standard created by NewTek to make it easy to develop video-related products
that share video on a local Ethernet network.

Comments

Nicolas George July 23, 2017, 10:20 a.m. UTC | #1
Le quintidi 5 thermidor, an CCXXV, Maksym Veremeyenko a écrit :
> attached patched for HEAD and n3.3 that implemented NewTek NDI, standard

Thanks for the patch. It looks promising.

No need to keep the patch for n3.3, new features only go in Git master.

And see comments below.

> created by NewTek to make it easy to develop video-related products that
> share video on a local Ethernet network.
> 
> -- 
> Maksym Veremeyenko
> 

> >From d5df13750d029d5314be24c9b1cb7306947507c1 Mon Sep 17 00:00:00 2001
> From: Maksym Veremeyenko <verem@m1.tv>
> Date: Sun, 23 Jul 2017 04:03:31 -0400

> Subject: [PATCH] Implement NewTek NDI support

Nit: "lavf: implement..."

> 
> ---

>  configure                |   9 +
>  doc/indevs.texi          |  54 +++++
>  doc/outdevs.texi         |  36 ++++
>  libavdevice/Makefile     |   2 +
>  libavdevice/alldevices.c |   1 +
>  libavdevice/ndi_dec.c    | 499 +++++++++++++++++++++++++++++++++++++++++++++++
>  libavdevice/ndi_enc.c    | 289 +++++++++++++++++++++++++++

Before sending the final version, remember to add a ChangeLog entry and
bump the minor version.

>  7 files changed, 890 insertions(+)
>  create mode 100644 libavdevice/ndi_dec.c
>  create mode 100644 libavdevice/ndi_enc.c
> 
> diff --git a/configure b/configure
> index 5811ee1..f267b07 100755
> --- a/configure
> +++ b/configure
> @@ -277,6 +277,7 @@ External library support:
>    --enable-libzvbi         enable teletext support via libzvbi [no]
>    --disable-lzma           disable lzma [autodetect]
>    --enable-decklink        enable Blackmagic DeckLink I/O support [no]
> +  --enable-ndi             enable Newteck NDI I/O support [no]
>    --enable-mediacodec      enable Android MediaCodec support [no]
>    --enable-libmysofa       enable libmysofa, needed for sofalizer filter [no]
>    --enable-openal          enable OpenAL 1.1 capture support [no]
> @@ -1507,6 +1508,7 @@ EXTERNAL_LIBRARY_GPL_LIST="
>  
>  EXTERNAL_LIBRARY_NONFREE_LIST="
>      decklink

> +    ndi

I would prefer libndi_newtek, or maybe just libndi. Because if this
protocol is successful, in one or two years there may be a GSoC student
implementing it natively for FFmpeg, and it would get the short name.

Could they be convinced to release their code with a usable licence?

>      libfdk_aac
>      openssl
>  "
> @@ -3012,6 +3014,10 @@ decklink_indev_deps="decklink threads"
>  decklink_indev_extralibs="-lstdc++"
>  decklink_outdev_deps="decklink threads"
>  decklink_outdev_extralibs="-lstdc++"
> +ndi_indev_deps="ndi pthreads"
> +ndi_indev_extralibs="-lndi"
> +ndi_outdev_deps="ndi pthreads"
> +ndi_outdev_extralibs="-lndi"
>  dshow_indev_deps="IBaseFilter"
>  dshow_indev_extralibs="-lpsapi -lole32 -lstrmiids -luuid -loleaut32 -lshlwapi"
>  dv1394_indev_deps="dv1394"
> @@ -5564,6 +5570,8 @@ avisynth_demuxer_extralibs='$ldl'
>  cuda_extralibs='$ldl'
>  decklink_outdev_extralibs="$decklink_outdev_extralibs $ldl"
>  decklink_indev_extralibs="$decklink_indev_extralibs $ldl"
> +ndi_outdev_extralibs="$ndi_outdev_extralibs $ldl"
> +ndi_indev_extralibs="$ndi_indev_extralibs $ldl"
>  frei0r_filter_extralibs='$ldl'
>  frei0r_src_filter_extralibs='$ldl'
>  ladspa_filter_extralibs='$ldl'
> @@ -5822,6 +5830,7 @@ enabled coreimage_filter  && { check_header_objcc QuartzCore/CoreImage.h || disa
>  enabled coreimagesrc_filter && { check_header_objcc QuartzCore/CoreImage.h || disable coreimagesrc_filter; }
>  enabled decklink          && { { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; } &&
>                                 { check_cpp_condition DeckLinkAPIVersion.h "BLACKMAGIC_DECKLINK_API_VERSION >= 0x0a060100" || die "ERROR: Decklink API version must be >= 10.6.1."; } }

> +enabled ndi               && { check_header Processing.NDI.Lib.h || die "ERROR: Processing.NDI.Lib.h header not found"; }
>  enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
>  enabled gmp               && require gmp gmp.h mpz_export -lgmp
>  enabled gnutls            && require_pkg_config gnutls gnutls/gnutls.h gnutls_global_init
> diff --git a/doc/indevs.texi b/doc/indevs.texi
> index 09e3321..7e9a5f1 100644
> --- a/doc/indevs.texi
> +++ b/doc/indevs.texi
> @@ -327,6 +327,60 @@ ffmpeg -channels 16 -format_code Hi50 -f decklink -i 'UltraStudio Mini Recorder'
>  
>  @end itemize
>  
> +@section ndi
> +
> +The ndi input device provides capture capabilities for using NDI (Network
> +Device Interface, standard created by NewTek).
> +

Please add a few words about the syntax for the input "filename".

> +To enable this input device, you need the NDI SDK and you
> +need to configure with the appropriate @code{--extra-cflags}
> +and @code{--extra-ldflags}.

Could they be convinced to provide a pkg-config file?

> +
> +@subsection Options
> +
> +@table @option
> +
> +@item find_sources
> +If set to @option{true}, print a list of found/available NDI sources and exit.
> +Defaults to @option{false}.
> +
> +@item wait_sources
> +Override time to wait until the number of online sources have changed (ms).
> +Defaults to @option{500}.
> +
> +@item allow_video_fields
> +When this flag is @option{false}, all video that you receive will be progressive.
> +Defaults to @option{1}.
> +
> +@item max_frames_probe
> +Maximum frames for probe input format.
> +Defaults to @option{25}.
> +
> +@item reference_level
> +The audio reference level in dB. This specifies how many dB above the
> +reference level (+4dBU) is the full range of 16 bit audio.
> +Defaults to @option{0}.
> +
> +@end table
> +
> +@subsection Examples
> +
> +@itemize
> +
> +@item
> +List input devices:
> +@example

> +ffmpeg -f ndi -find_sources 1 -i foo

"foo" is not very elegant; "dummy"?

> +@end example
> +
> +@item
> +Restream to NDI:
> +@example

> +ffmpeg -f ndi -i "DEV-5.INTERNAL.M1STEREO.TV (NDI_SOURCE_NAME_1)" -acodec copy -vcodec wrapped_avframe -f ndi -y NDI_SOURCE_NAME_2

Nowadays, I think we prefer "-c:a" and "-c:v".

Is -vcodec wrapped_avframe really necessary? It should be automatic.

> +@end example
> +
> +@end itemize
> +
>  @section dshow
>  
>  Windows DirectShow input device.
> diff --git a/doc/outdevs.texi b/doc/outdevs.texi
> index df41cc8..7830cc7 100644
> --- a/doc/outdevs.texi
> +++ b/doc/outdevs.texi
> @@ -182,6 +182,42 @@ ffmpeg -i test.avi -f decklink -pix_fmt uyvy422 -s 720x486 -r 24000/1001 'DeckLi
>  
>  @end itemize
>  
> +@section ndi
> +
> +The ndi output device provides playback capabilities for using NDI (Network
> +Device Interface, standard created by NewTek).
> +
> +To enable this output device, you need the NDI SDK and you
> +need to configure with the appropriate @code{--extra-cflags}
> +and @code{--extra-ldflags}.
> +
> +NDI is very picky about the formats it supports. Pixel format is always
> +uyvy422, framerate, field order and video size must be determined for your
> +device with @command{-list_formats 1}. Audio sample rate is always 48 kHz.
> +
> +@subsection Options
> +
> +@table @option
> +
> +@item reference_level
> +The audio reference level in dB. This specifies how many dB above the
> +reference level (+4dBU) is the full range of 16 bit audio.
> +Defaults to @option{0}.
> +
> +@end table
> +
> +@subsection Examples
> +
> +@itemize
> +
> +@item
> +Play video clip:
> +@example
> +ffmpeg -i "udp://@239.1.1.1:10480?fifo_size=1000000&overrun_nonfatal=1" -vf "scale=720:576,fps=fps=25,setdar=dar=16/9,format=pix_fmts=uyvy422" -vcodec wrapped_avframe -f ndi NEW_NDI1
> +@end example
> +
> +@end itemize
> +
>  @section fbdev
>  
>  Linux framebuffer output device.
> diff --git a/libavdevice/Makefile b/libavdevice/Makefile
> index 1d4e9e6..a7e0a18 100644
> --- a/libavdevice/Makefile
> +++ b/libavdevice/Makefile
> @@ -19,6 +19,8 @@ OBJS-$(CONFIG_BKTR_INDEV)                += bktr.o
>  OBJS-$(CONFIG_CACA_OUTDEV)               += caca.o
>  OBJS-$(CONFIG_DECKLINK_OUTDEV)           += decklink_enc.o decklink_enc_c.o decklink_common.o
>  OBJS-$(CONFIG_DECKLINK_INDEV)            += decklink_dec.o decklink_dec_c.o decklink_common.o
> +OBJS-$(CONFIG_NDI_OUTDEV)                += ndi_enc.o
> +OBJS-$(CONFIG_NDI_INDEV)                 += ndi_dec.o
>  OBJS-$(CONFIG_DSHOW_INDEV)               += dshow_crossbar.o dshow.o dshow_enummediatypes.o \
>                                              dshow_enumpins.o dshow_filter.o \
>                                              dshow_pin.o dshow_common.o
> diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c
> index a8ed53a..bbea836 100644
> --- a/libavdevice/alldevices.c
> +++ b/libavdevice/alldevices.c
> @@ -46,6 +46,7 @@ static void register_all(void)
>      REGISTER_INDEV   (BKTR,             bktr);
>      REGISTER_OUTDEV  (CACA,             caca);
>      REGISTER_INOUTDEV(DECKLINK,         decklink);
> +    REGISTER_INOUTDEV(NDI,              ndi);
>      REGISTER_INDEV   (DSHOW,            dshow);
>      REGISTER_INDEV   (DV1394,           dv1394);
>      REGISTER_INOUTDEV(FBDEV,            fbdev);
> diff --git a/libavdevice/ndi_dec.c b/libavdevice/ndi_dec.c
> new file mode 100644
> index 0000000..7454ca8
> --- /dev/null
> +++ b/libavdevice/ndi_dec.c
> @@ -0,0 +1,499 @@
> +/*
> + * NDI input
> + * Copyright (c) 2017 Maksym Veremeyenko
> + *
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include "libavformat/avformat.h"
> +#include "libavformat/internal.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/imgutils.h"
> +
> +#include <pthread.h>

> +//#include <semaphore.h>

To remove before applying.

> +
> +#include <Processing.NDI.Lib.h>
> +

> +typedef struct AVPacketQueue {
> +    AVPacketList *first_pkt, *last_pkt;
> +    int nb_packets;

> +    unsigned long long size;

Never ever use "long" in modern C code unless forced by an obsolescent
API. In this particular case, the correct type would be size_t; except
you put a hard limit on the size later, so plain unsigned would be
enough.

> +    int abort_request;
> +    pthread_mutex_t mutex;
> +    pthread_cond_t cond;
> +    AVFormatContext *avctx;
> +} AVPacketQueue;

This whole API looks duplicated in the encoder. Not acceptable. You need
to move it into a pair of .c/.h files, with the "ff_" prefix for the
function names (the structure name can stay AV, since it is not
exported).

But I think it would be better if you tried to use AVThreadMessageQueue
instead. You can extend it if necessary.

> +

> +static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q)
> +{
> +    memset(q, 0, sizeof(AVPacketQueue));
> +    pthread_mutex_init(&q->mutex, NULL);
> +    pthread_cond_init(&q->cond, NULL);
> +    q->avctx = avctx;

avctx looks used only for logging: "void *log"?

> +}
> +
> +static void avpacket_queue_flush(AVPacketQueue *q)
> +{
> +    AVPacketList *pkt, *pkt1;
> +
> +    pthread_mutex_lock(&q->mutex);
> +    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
> +        pkt1 = pkt->next;
> +        av_packet_unref(&pkt->pkt);
> +        av_freep(&pkt);
> +    }
> +    q->last_pkt   = NULL;
> +    q->first_pkt  = NULL;
> +    q->nb_packets = 0;
> +    q->size       = 0;
> +    pthread_mutex_unlock(&q->mutex);
> +}
> +

> +static void avpacket_queue_end(AVPacketQueue *q)

Nit: avpacket_queue_uninit(), for consistency.

> +{
> +    avpacket_queue_flush(q);
> +    pthread_mutex_destroy(&q->mutex);
> +    pthread_cond_destroy(&q->cond);
> +}
> +
> +static unsigned long long avpacket_queue_size(AVPacketQueue *q)
> +{
> +    unsigned long long size;
> +    pthread_mutex_lock(&q->mutex);
> +    size = q->size;
> +    pthread_mutex_unlock(&q->mutex);
> +    return size;
> +}
> +
> +static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
> +{
> +    AVPacketList *pkt1;
> +
> +    // Drop Packet if queue size is > 1GB
> +    if (avpacket_queue_size(q) >  1024 * 1024 * 1024 ) {

> +        av_log(q->avctx, AV_LOG_WARNING,  "%s: input buffer overrun!\n", __FUNCTION__);

AV_LOG_ERROR, since it is returning an error?

> +        return -1;

Proper error code please.

> +    }
> +    /* duplicate the packet */
> +    if (av_dup_packet(pkt) < 0) {

> +        return -1;

Ditto. In this particular case, you must propagate the error code from
av_dup_packet().

> +    }
> +

> +    pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));

Drop the cast, it is useless in C (this is not c++) and harmful (it can
hide warnings).

sizeof(*pkt1) instead of sizeof(AVPacketList).

> +    if (!pkt1) {

> +        return -1;

AVERROR(ENOMEM)

> +    }
> +    pkt1->pkt  = *pkt;
> +    pkt1->next = NULL;
> +
> +    pthread_mutex_lock(&q->mutex);
> +

> +    if (!q->last_pkt) {
> +        q->first_pkt = pkt1;
> +    } else {
> +        q->last_pkt->next = pkt1;
> +    }
> +
> +    q->last_pkt = pkt1;

There is a little known but nice simplification when implementing
linked-lists like that: instead of "last_pkt", have in the structure
"AVPacketList **tail", and you can write this whole block without a
condition:

	*q->tail = pkt1;
	q->tail = &(*q->tail)->next;

Later, you need to replace "q->last_pkt = NULL" with "q->tail =
&q->first_pkt".

> +    q->nb_packets++;
> +    q->size += pkt1->pkt.size + sizeof(*pkt1);
> +
> +    pthread_cond_signal(&q->cond);
> +
> +    pthread_mutex_unlock(&q->mutex);
> +    return 0;
> +}
> +
> +static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)

This function looks called only with block = 1. I suggest you remove the
corresponding code, because it would be untested.

> +{
> +    AVPacketList *pkt1;
> +    int ret;
> +
> +    pthread_mutex_lock(&q->mutex);
> +

> +    for (;; ) {

Nit: spurious space.

> +        pkt1 = q->first_pkt;
> +        if (pkt1) {
> +            q->first_pkt = pkt1->next;
> +            if (!q->first_pkt) {
> +                q->last_pkt = NULL;
> +            }
> +            q->nb_packets--;
> +            q->size -= pkt1->pkt.size + sizeof(*pkt1);
> +            *pkt     = pkt1->pkt;
> +            av_free(pkt1);
> +            ret = 1;
> +            break;
> +        } else if (!block) {
> +            ret = 0;
> +            break;
> +        } else {
> +            pthread_cond_wait(&q->cond, &q->mutex);
> +        }
> +    }
> +    pthread_mutex_unlock(&q->mutex);
> +    return ret;
> +}
> +

> +struct ndi_ctx {

Nit: NDIContext for consistency.

> +    const AVClass *cclass;
> +

> +    void *ctx;

Looks unused.

> +
> +    /* Options */
> +    int find_sources;
> +    int wait_sources;
> +    int allow_video_fields;
> +    int max_frames_probe;
> +    int reference_level;
> +
> +    /* Runtime */

> +    NDIlib_recv_create_t* recv;

Here and everywhere: nit: the pointer mark * belongs with the variable
or field, not with the type.

> +    pthread_t reader;

> +    int f_started, f_stop, dropped;

"f_"? What does it mean?

> +
> +    /* Streams */
> +    AVStream *video_st, *audio_st;
> +    AVPacketQueue queue;

> +    AVFormatContext *avctx;

All the functions are already called with avctx as a parameter, no need
for a back pointer.

> +    int interlaced;
> +};
> +
> +static int ndi_enqeue_video(struct ndi_ctx *ctx, NDIlib_video_frame_t *v)
> +{
> +    AVPacket pkt;
> +    av_init_packet(&pkt);
> +

> +    pkt.dts = pkt.pts = av_rescale_q(v->timecode, (AVRational){1, 10000000LL}, ctx->video_st->time_base);

The remark about not using "long" applies to qualified integers, of
course.

Nit: #define NDI_TIMEBASE and use it everywhere.

> +    pkt.duration = av_rescale_q(1, (AVRational){v->frame_rate_D, v->frame_rate_N}, ctx->video_st->time_base);

Unless I am mistaken, video_st->time_base is set to
(AVRational){v->frame_rate_D, v->frame_rate_N}. Therefore, I suggest to
drop this av_rescale_q() and add an av_assert1() instead (be sure to
always use --assert-level=2 when developing).

> +
> +    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",

> +        __FUNCTION__, pkt.dts, pkt.duration, v->timecode);

__FUNCTION__ is not standard, but used elsewhere in the code; __func__
is more standard and also used in the code. But printing the function
name in debug messages is not usually done in the code. Better just make
sure that the message is easily greppable.

Same below of course.

> +
> +    pkt.flags       |= AV_PKT_FLAG_KEY;
> +    pkt.stream_index = ctx->video_st->index;
> +    pkt.data         = (uint8_t *)v->p_data;
> +    pkt.size         = v->yres * v->line_stride_in_bytes;
> +
> +    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
> +        ++ctx->dropped;
> +    }
> +

> +//    NDIlib_recv_free_video(ctx->recv, v);

Cleanup?

> +
> +    return 0;
> +}
> +
> +static int ndi_enqeue_audio(struct ndi_ctx *ctx, NDIlib_audio_frame_t *a)
> +{
> +    AVPacket pkt;
> +    NDIlib_audio_frame_interleaved_16s_t dst;
> +
> +    av_init_packet(&pkt);
> +
> +    pkt.dts = pkt.pts = av_rescale_q(a->timecode, (AVRational){1, 10000000LL}, ctx->audio_st->time_base);
> +    pkt.duration = av_rescale_q(1, (AVRational){a->no_samples, a->sample_rate}, ctx->audio_st->time_base);
> +
> +    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
> +        __FUNCTION__, pkt.dts, pkt.duration, a->timecode);
> +
> +    pkt.size         = 2 * a->no_samples * a->no_channels;
> +    pkt.data         = (uint8_t *)av_mallocz(pkt.size);
> +
> +    pkt.flags       |= AV_PKT_FLAG_KEY;
> +    pkt.stream_index = ctx->audio_st->index;
> +
> +    dst.reference_level = 0;
> +    dst.p_data = (short*)pkt.data;
> +    NDIlib_util_audio_to_interleaved_16s(a, &dst);
> +
> +    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
> +        ++ctx->dropped;
> +    }
> +
> +    NDIlib_recv_free_audio(ctx->recv, a);
> +
> +    return 0;
> +}
> +

> +static void* ndi_reader(void* p)
> +{

> +    struct ndi_ctx *ctx = (struct ndi_ctx *)p;

Unnecessary (and thus harmful) cast.

> +

> +    while (!ctx->f_stop)
> +    {

Nit: braces on the same line. Same below.

> +        NDIlib_video_frame_t v;
> +        NDIlib_audio_frame_t a;
> +        NDIlib_metadata_frame_t m;
> +        NDIlib_frame_type_e t;
> +
> +        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
> +

> +        if (NDIlib_frame_type_video == t)

The test looks strange in that direction.

> +            ndi_enqeue_video(ctx, &v);
> +        else if (NDIlib_frame_type_audio == t)
> +            ndi_enqeue_audio(ctx, &a);
> +        else if (NDIlib_frame_type_metadata == t)
> +            NDIlib_recv_free_metadata(ctx->recv, &m);

Is there a risk of leak if a new type of packet is invented? Looks like
a bad API design by NewTek.

> +    };
> +
> +    return NULL;
> +};
> +
> +static int ndi_find_sources(AVFormatContext *avctx, const char* name, NDIlib_source_t *source_to_connect_to)
> +{
> +    unsigned int n, i, j = -1;

> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;

Unnecessary (and thus harmful) cast.

> +    const NDIlib_source_t* ndi_srcs = NULL;
> +    const NDIlib_find_create_t find_create_desc = { true, NULL };
> +    NDIlib_find_instance_t ndi_find;
> +
> +    ndi_find = NDIlib_find_create2( &find_create_desc );
> +    if (!ndi_find) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_find_create failed.\n");
> +        return AVERROR(EIO);
> +    }
> +
> +    NDIlib_find_wait_for_sources(ndi_find, ctx->wait_sources);
> +    ndi_srcs = NDIlib_find_get_current_sources(ndi_find, &n);
> +
> +    av_log(avctx, AV_LOG_INFO, "Found %d NDI sources:\n", n);
> +    for (i = 0; i < n; i++)
> +    {
> +        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n", ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
> +        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
> +            *source_to_connect_to = ndi_srcs[i];

> +            j = i;

break?

> +        }
> +    }
> +
> +    NDIlib_find_destroy(ndi_find);
> +

> +    return j;

j must be signed, then, for the "ret < 0" check in the caller to work.

> +}
> +
> +static int ndi_read_header(AVFormatContext *avctx)
> +{
> +    int ret, i;
> +    AVStream *st;
> +    NDIlib_recv_create_t recv_create_desc;
> +    const NDIlib_tally_t tally_state = { true, false };

> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;

Unnecessary (and thus harmful) cast.

> +
> +    ctx->avctx = avctx;
> +
> +    if (!NDIlib_initialize()) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");

> +        return AVERROR(EIO);

AVERROR_EXTERNAL (unless the library provides error codes that could be
translated).

> +    }
> +
> +    /* Find available sources. */

> +    ret = ndi_find_sources(avctx, avctx->filename, &recv_create_desc.source_to_connect_to);
> +    if (ctx->find_sources) {

The find_sources option looks unnecessary if the sources are always
printed and it only causes an error.

> +        return AVERROR_EXIT;
> +    }
> +    if (ret < 0) {
> +        return AVERROR(ENOENT);
> +    }
> +
> +    /* Create receiver description */
> +    recv_create_desc.color_format = NDIlib_recv_color_format_e_UYVY_RGBA;
> +    recv_create_desc.bandwidth = NDIlib_recv_bandwidth_highest;
> +    recv_create_desc.allow_video_fields = ctx->allow_video_fields;
> +
> +    /* Create the receiver */
> +    ctx->recv = NDIlib_recv_create2(&recv_create_desc);
> +    if (!ctx->recv) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_recv_create2 failed.\n");
> +        return AVERROR(EIO);
> +    }
> +
> +    /* Set tally */
> +    NDIlib_recv_set_tally(ctx->recv, &tally_state);
> +
> +    avpacket_queue_init(avctx, &ctx->queue);
> +

> +    /* Probe input */
> +    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++)

This is already implemented, in a more generic and uniform way, in
libavformat. Drop the loop and move the format-probing code into
ndi_read_packet().

> +    {
> +        NDIlib_video_frame_t v;
> +        NDIlib_audio_frame_t a;
> +        NDIlib_metadata_frame_t m;
> +        NDIlib_frame_type_e t;
> +
> +        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
> +
> +        av_log(avctx, AV_LOG_DEBUG, "NDIlib_recv_capture[%d]=%d.\n", i, t);
> +
> +        if (NDIlib_frame_type_video == t) {
> +            if (!ctx->video_st) {
> +
> +                st = avformat_new_stream(avctx, NULL);
> +                if (!st) {
> +                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
> +                    ret = AVERROR(ENOMEM);
> +                    goto error;
> +                }
> +
> +                st->time_base.den               = v.frame_rate_N;
> +                st->time_base.num               = v.frame_rate_D;
> +                av_stream_set_r_frame_rate(st, av_make_q(st->time_base.den, st->time_base.num));
> +                st->codecpar->codec_type        = AVMEDIA_TYPE_VIDEO;
> +                st->codecpar->width             = v.xres;
> +                st->codecpar->height            = v.yres;
> +                st->codecpar->codec_id          = AV_CODEC_ID_RAWVIDEO;
> +                st->codecpar->bit_rate          = av_rescale(v.xres * v.yres * 16, st->time_base.den, st->time_base.num);
> +

> +                if (NDIlib_FourCC_type_UYVY == v.FourCC || NDIlib_FourCC_type_UYVA == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_UYVY422;

Looks strange: the A in UYVA means alpha, does it not? What happens to
the alpha channel?

> +                    st->codecpar->codec_tag     = MKTAG('U', 'Y', 'V', 'Y');
> +                } else if (NDIlib_FourCC_type_BGRA == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_BGRA;
> +                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', 'A');
> +                } else if (NDIlib_FourCC_type_BGRX == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_BGR0;
> +                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', '0');
> +                } else if (NDIlib_FourCC_type_RGBA == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_RGBA;
> +                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', 'A');
> +                } else if (NDIlib_FourCC_type_RGBX == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_RGB0;
> +                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', '0');
> +                } else {
> +                    av_log(avctx, AV_LOG_ERROR, "Unsupported video stream format, v.FourCC=%d\n", v.FourCC);
> +                    ret = AVERROR(ENOTSUP);
> +                    goto error;
> +                }
> +
> +                ctx->interlaced = v.frame_format_type == NDIlib_frame_format_type_interleaved ? 1 : 0;
> +
> +                avpriv_set_pts_info(st, 64, v.frame_rate_D, v.frame_rate_N);
> +
> +                ctx->video_st = st;
> +            }
> +
> +            ndi_enqeue_video(ctx, &v);
> +        } else if (NDIlib_frame_type_audio == t) {
> +            if (!ctx->audio_st) {
> +
> +                st = avformat_new_stream(avctx, NULL);
> +                if (!st) {
> +                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
> +                    ret = AVERROR(ENOMEM);
> +                    goto error;
> +                }
> +
> +                st->codecpar->codec_type        = AVMEDIA_TYPE_AUDIO;
> +                st->codecpar->codec_id          = AV_CODEC_ID_PCM_S16LE;
> +                st->codecpar->sample_rate       = a.sample_rate;
> +                st->codecpar->channels          = a.no_channels;
> +
> +                avpriv_set_pts_info(st, 64, 1, a.sample_rate);  /* 64 bits pts in us */
> +
> +                ctx->audio_st = st;
> +            }
> +
> +            ndi_enqeue_audio(ctx, &a);
> +        } else if (NDIlib_frame_type_metadata == t) {
> +
> +            NDIlib_recv_free_metadata(ctx->recv, &m);
> +        }
> +
> +        if (ctx->video_st && ctx->audio_st)
> +            break;
> +    }
> +
> +    if (ctx->video_st || ctx->audio_st)
> +    {
> +        ctx->f_started = 1;

> +        pthread_create(&ctx->reader, NULL, ndi_reader, ctx);

Missing error check.

> +    }
> +
> +error:
> +    return ret;
> +}
> +
> +static int ndi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
> +{

> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;

Unnecessary (and thus harmful) cast.

> +

> +    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;

This line produces a warning, does it not? Anyway, you are not supposed
to use st->codec any longer.

> +
> +    avpacket_queue_get(&ctx->queue, pkt, 1);
> +
> +    if (frame && ctx->interlaced) {
> +        frame->interlaced_frame = 1;
> +        frame->top_field_first = 1;
> +    }
> +
> +    return 0;
> +
> +}
> +
> +static int ndi_read_close(AVFormatContext *avctx)
> +{

> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;

Unnecessary (and thus harmful) cast.

> +
> +    if (ctx->recv)
> +    {
> +        if (ctx->f_started)
> +        {

> +            ctx->f_stop = 1;

This needs to be protected by a memory barrier, here and in the thread.

> +            pthread_join(ctx->reader, NULL);
> +        }
> +
> +        ctx->f_started = 0;
> +        ctx->f_stop = 0;
> +
> +        avpacket_queue_end(&ctx->queue);
> +        NDIlib_recv_destroy(ctx->recv);
> +    };
> +
> +    return 0;
> +}
> +
> +#define OFFSET(x) offsetof(struct ndi_ctx, x)
> +#define DEC AV_OPT_FLAG_DECODING_PARAM
> +
> +static const AVOption options[] = {

> +    { "find_sources", "Find available sources"  , OFFSET(find_sources), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, DEC },

AV_OPT_TYPE_BOOL

> +    { "wait_sources", "Time to wait until the number of online sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },

AV_OPT_TYPE_DURATION

> +    { "allow_video_fields", "When this flag is FALSE, all video that you receive will be progressive"  , OFFSET(allow_video_fields), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, DEC },

AV_OPT_TYPE_BOOL

> +    { "max_frames_probe", "Maximum frames for probe"  , OFFSET(max_frames_probe), AV_OPT_TYPE_INT, { .i64 = 25 }, 6, 100, DEC },
> +    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, DEC | AV_OPT_FLAG_AUDIO_PARAM},
> +    { NULL },
> +};
> +
> +static const AVClass ndi_demuxer_class = {
> +    .class_name = "NDI demuxer",
> +    .item_name  = av_default_item_name,
> +    .option     = options,
> +    .version    = LIBAVUTIL_VERSION_INT,
> +    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
> +};
> +
> +AVInputFormat ff_ndi_demuxer = {
> +    .name           = "ndi",

> +    .long_name      = NULL_IF_CONFIG_SMALL("NDI input"),

"Network Device Interface (NDI) input using NewTek library"

> +    .flags          = AVFMT_NOFILE,
> +    .priv_class     = &ndi_demuxer_class,
> +    .priv_data_size = sizeof(struct ndi_ctx),
> +    .read_header   = ndi_read_header,
> +    .read_packet   = ndi_read_packet,
> +    .read_close    = ndi_read_close,
> +};
> diff --git a/libavdevice/ndi_enc.c b/libavdevice/ndi_enc.c
> new file mode 100644
> index 0000000..8cbdb9e
> --- /dev/null
> +++ b/libavdevice/ndi_enc.c
> @@ -0,0 +1,289 @@
> +/*
> + * NDI output
> + * Copyright (c) 2017 Maksym Veremeyenko
> + *
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include "libavformat/avformat.h"
> +#include "libavformat/internal.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/imgutils.h"
> +
> +#include <Processing.NDI.Lib.h>
> +
> +struct ndi_ctx {
> +    const AVClass *cclass;
> +

> +    void *ctx;

Looks unused.

> +
> +    /* Options */
> +    int reference_level;
> +
> +    /* Streams present */
> +    NDIlib_video_frame_t* video;
> +    NDIlib_audio_frame_interleaved_16s_t* audio;
> +

> +    /* Status */
> +    int64_t last_pts;

Looks unused.

> +
> +    NDIlib_send_instance_t ndi_send;
> +    AVFrame *last_avframe;
> +};
> +
> +static int ndi_write_trailer(AVFormatContext *avctx)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    if (ctx->ndi_send)
> +    {
> +        if (ctx->last_avframe) {
> +            NDIlib_send_send_video_async(ctx->ndi_send, NULL);
> +            av_frame_free(&ctx->last_avframe);
> +        }
> +
> +        NDIlib_send_destroy(ctx->ndi_send);
> +    }
> +
> +    if (ctx->video)
> +        av_freep(&ctx->video);
> +
> +    if (ctx->audio)
> +        av_freep(&ctx->audio);
> +
> +    return 0;
> +}
> +
> +static int ndi_write_video_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVFrame *avframe, *tmp = (AVFrame *)pkt->data;
> +
> +    if (tmp->format != AV_PIX_FMT_UYVY422 ||
> +        tmp->width  != ctx->video->xres ||
> +        tmp->height != ctx->video->yres) {
> +        av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
> +        av_log(avctx, AV_LOG_ERROR, "tmp->width=%d, tmp->height=%d, ctx->video->xres=%d, ctx->video->yres=%d\n", tmp->width, tmp->height, ctx->video->xres, ctx->video->yres);
> +        return AVERROR(EINVAL);
> +    }
> +    avframe = av_frame_clone(tmp);
> +    if (!avframe) {
> +        av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");

> +        return AVERROR(EIO);

AVERROR(ENOMEM)

> +    }
> +
> +    ctx->video->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000LL});
> +

> +    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
> +        ? -avframe->linesize[0]
> +        : avframe->linesize[0];

abs()?

> +    ctx->video->p_data = (avframe->linesize[0] < 0)
> +        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
> +        : (void *)(avframe->data[0]);

Did you test with negative linesize? It looks like it will flip the
video.

> +
> +    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
> +        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
> +

> +    NDIlib_send_send_video_async(ctx->ndi_send, ctx->video);
> +
> +    if (ctx->last_avframe)
> +        av_frame_free(&ctx->last_avframe);
> +    ctx->last_avframe = avframe;

This looks very strange. Can you explain?

It looks to me like NDIlib_send_send_video_async() is only asynchronous
for one frame, but will block if a second frame is given before the
first one has been sent. Is that it? If so, a comment would be nice.

> +
> +    return 0;
> +}
> +
> +static int ndi_write_audio_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    ctx->audio->p_data = (void*)pkt->data;
> +    ctx->audio->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000});
> +    ctx->audio->no_samples = pkt->size / (ctx->audio->no_channels << 1);
> +
> +    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
> +        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
> +
> +    NDIlib_util_send_send_audio_interleaved_16s(ctx->ndi_send, ctx->audio);
> +
> +    return 0;
> +}
> +
> +static int ndi_write_packet(AVFormatContext *avctx, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVStream *st = avctx->streams[pkt->stream_index];
> +
> +    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
> +
> +    if      (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
> +        return ndi_write_video_packet(avctx, st, pkt);
> +    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
> +        return ndi_write_audio_packet(avctx, st, pkt);
> +

> +    return AVERROR(EIO);

AVERROR_BUG

> +}
> +
> +static int ndi_setup_audio(AVFormatContext *avctx, AVStream *st)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVCodecParameters *c = st->codecpar;
> +
> +    if (ctx->audio) {
> +        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");

> +        return -1;

AVERROR(EINVAL)

> +    }
> +    if (c->sample_rate != 48000) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
> +               " Only 48kHz is supported.\n");

> +        return -1;

Ditto.

> +    }
> +    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
> +               " Only stereo and 7.1 are supported.\n");

Check channel layout too.

> +        return -1;

Ditto.

> +    }
> +
> +    ctx->audio = (NDIlib_audio_frame_interleaved_16s_t *) av_mallocz(sizeof(NDIlib_audio_frame_interleaved_16s_t));

Unnecessary (and thus harmful) cast.

And missing failure check.

> +
> +    ctx->audio->sample_rate = c->sample_rate;
> +    ctx->audio->no_channels = c->channels;
> +    ctx->audio->reference_level = ctx->reference_level;
> +
> +    /* The device expects the sample rate to be fixed. */
> +    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
> +
> +    return 0;
> +}
> +
> +static int ndi_setup_video(AVFormatContext *avctx, AVStream *st)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVCodecParameters *c = st->codecpar;
> +    AVRational display_aspect_ratio;
> +
> +    if (ctx->video) {
> +        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");

> +        return AVERROR(ENOTSUP);

I suspect this library also exists for Windows and macOS, so you cannot
use ENOTSUP. Use EINVAL instead.

> +    }
> +
> +    if (c->codec_id != AV_CODEC_ID_WRAPPED_AVFRAME) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported codec format!"
> +               " Only AV_CODEC_ID_WRAPPED_AVFRAME is supported (-vcodec wrapped_avframe).\n");

> +        return AVERROR(ENOTSUP);

Ditto.

> +    }
> +
> +    if (c->format != AV_PIX_FMT_UYVY422) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
> +               " Only AV_PIX_FMT_UYVY422 is supported.\n");

> +        return AVERROR(ENOTSUP);

Ditto.

> +    }
> +
> +    ctx->video = (NDIlib_video_frame_t *) av_mallocz(sizeof(NDIlib_video_frame_t));

Unnecessary (and thus harmful) cast.

And missing failure check.

> +
> +    ctx->video->FourCC = NDIlib_FourCC_type_UYVY;
> +    ctx->video->xres = c->width;
> +    ctx->video->yres = c->height;

> +    ctx->video->frame_rate_N = st->time_base.den;
> +    ctx->video->frame_rate_D = st->time_base.num;

st->time_base is not the frame rate. Use avg_frame_rate.

> +    ctx->video->frame_format_type = AV_FIELD_PROGRESSIVE == c->field_order
> +        ? NDIlib_frame_format_type_progressive
> +        : NDIlib_frame_format_type_interleaved;
> +
> +    if (st->sample_aspect_ratio.num &&
> +        av_cmp_q(st->sample_aspect_ratio, st->codecpar->sample_aspect_ratio)) {

> +        av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
> +                  st->codecpar->width  * (int64_t)st->sample_aspect_ratio.num,
> +                  st->codecpar->height * (int64_t)st->sample_aspect_ratio.den,
> +                  1024 * 1024);

I think you can use rational arithmetic to make this more readable.

> +        ctx->video->picture_aspect_ratio = av_q2d(display_aspect_ratio);
> +    }
> +    else
> +        ctx->video->picture_aspect_ratio = 16.0 / 9.0;
> +
> +    /* The device expects the framerate to be fixed. */
> +    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
> +
> +    return 0;
> +}
> +
> +static int ndi_write_header(AVFormatContext *avctx)
> +{
> +    int ret = 0;
> +    unsigned int n;
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    const NDIlib_send_create_t ndi_send_desc = {avctx->filename, NULL, true, false};
> +
> +    /* check if streams compatible */
> +    for (n = 0; n < avctx->nb_streams; n++) {
> +        AVStream *st = avctx->streams[n];
> +        AVCodecParameters *c = st->codecpar;

> +        if        (c->codec_type == AVMEDIA_TYPE_AUDIO) {
> +            if ((ret = ndi_setup_audio(avctx, st)))
> +                goto error;
> +        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
> +            if ((ret = ndi_setup_video(avctx, st)))
> +                goto error;
> +        } else {
> +            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");

> +            ret = AVERROR(EIO);

EINVAL

> +            goto error;
> +        }
> +    }
> +
> +    if (!NDIlib_initialize()) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");

ret = AVERROR_EXTERNAL;

> +    } else {
> +        ctx->ndi_send = NDIlib_send_create(&ndi_send_desc);

> +        if (!ctx->ndi_send)
> +            av_log(avctx, AV_LOG_ERROR, "Failed to create NDI output %s\n", avctx->filename);

ret = AVERROR_EXTERNAL;

> +        else
> +            ret = 0;

ret is already 0.

> +    }
> +
> +error:
> +    return ret;
> +}
> +
> +#define OFFSET(x) offsetof(struct ndi_ctx, x)
> +static const AVOption options[] = {
> +
> +    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_AUDIO_PARAM},
> +    { NULL },
> +};
> +
> +static const AVClass ndi_muxer_class = {
> +    .class_name = "NDI muxer",
> +    .item_name  = av_default_item_name,
> +    .option     = options,
> +    .version    = LIBAVUTIL_VERSION_INT,
> +    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT,
> +};
> +
> +AVOutputFormat ff_ndi_muxer = {
> +    .name           = "ndi",

> +    .long_name      = NULL_IF_CONFIG_SMALL("NDI output"),

Same comment as above.

> +    .audio_codec    = AV_CODEC_ID_PCM_S16LE,
> +    .video_codec    = AV_CODEC_ID_WRAPPED_AVFRAME,
> +    .subtitle_codec = AV_CODEC_ID_NONE,
> +    .flags          = AVFMT_NOFILE,
> +    .priv_class     = &ndi_muxer_class,
> +    .priv_data_size = sizeof(struct ndi_ctx),
> +    .write_header   = ndi_write_header,
> +    .write_packet   = ndi_write_packet,
> +    .write_trailer  = ndi_write_trailer,
> +};

Regards,
Michael Niedermayer July 23, 2017, 11:21 a.m. UTC | #2
On Sun, Jul 23, 2017 at 11:08:17AM +0300, Maksym Veremeyenko wrote:
> Hi,
> 
> attached patched for HEAD and n3.3 that implemented NewTek NDI,
> standard created by NewTek to make it easy to develop video-related
> products that share video on a local Ethernet network.
> 
> -- 
> Maksym Veremeyenko
> 

>  configure                |    9 
>  doc/indevs.texi          |   54 +++++
>  doc/outdevs.texi         |   36 +++
>  libavdevice/Makefile     |    2 
>  libavdevice/alldevices.c |    1 
>  libavdevice/ndi_dec.c    |  499 +++++++++++++++++++++++++++++++++++++++++++++++
>  libavdevice/ndi_enc.c    |  289 +++++++++++++++++++++++++++
>  7 files changed, 890 insertions(+)
> d6587ae4380ee1ef39a7b4787b5908e3ca850bba  0001-Implement-NewTek-NDI-support-head.patch
> From d5df13750d029d5314be24c9b1cb7306947507c1 Mon Sep 17 00:00:00 2001
> From: Maksym Veremeyenko <verem@m1.tv>
> Date: Sun, 23 Jul 2017 04:03:31 -0400
> Subject: [PATCH] Implement NewTek NDI support
> 
> ---
>  configure                |   9 +
>  doc/indevs.texi          |  54 +++++
>  doc/outdevs.texi         |  36 ++++
>  libavdevice/Makefile     |   2 +
>  libavdevice/alldevices.c |   1 +
>  libavdevice/ndi_dec.c    | 499 +++++++++++++++++++++++++++++++++++++++++++++++
>  libavdevice/ndi_enc.c    | 289 +++++++++++++++++++++++++++
>  7 files changed, 890 insertions(+)
>  create mode 100644 libavdevice/ndi_dec.c
>  create mode 100644 libavdevice/ndi_enc.c

you may also want to add yourself to the MAINTAINER file

thx

[...]
Marton Balint July 23, 2017, 11:53 a.m. UTC | #3
On Sun, 23 Jul 2017, Maksym Veremeyenko wrote:

> Hi,
>
> attached patched for HEAD and n3.3 that implemented NewTek NDI, standard 
> created by NewTek to make it easy to develop video-related products that 
> share video on a local Ethernet network.

Thanks! Below are some comments.

> From: Maksym Veremeyenko <verem@m1.tv>
> Date: Sun, 23 Jul 2017 04:03:31 -0400
> Subject: [PATCH] Implement NewTek NDI support
> 
> ---
>  configure                |   9 +
>  doc/indevs.texi          |  54 +++++
>  doc/outdevs.texi         |  36 ++++
>  libavdevice/Makefile     |   2 +
>  libavdevice/alldevices.c |   1 +
>  libavdevice/ndi_dec.c    | 499 +++++++++++++++++++++++++++++++++++++++++++++++
>  libavdevice/ndi_enc.c    | 289 +++++++++++++++++++++++++++
>  7 files changed, 890 insertions(+)
>  create mode 100644 libavdevice/ndi_dec.c
>  create mode 100644 libavdevice/ndi_enc.c
> 
> diff --git a/configure b/configure
> index 5811ee1..f267b07 100755
> --- a/configure
> +++ b/configure
> @@ -277,6 +277,7 @@ External library support:
>    --enable-libzvbi         enable teletext support via libzvbi [no]
>    --disable-lzma           disable lzma [autodetect]
>    --enable-decklink        enable Blackmagic DeckLink I/O support [no]
> +  --enable-ndi             enable Newteck NDI I/O support [no]
>    --enable-mediacodec      enable Android MediaCodec support [no]
>    --enable-libmysofa       enable libmysofa, needed for sofalizer filter [no]
>    --enable-openal          enable OpenAL 1.1 capture support [no]
> @@ -1507,6 +1508,7 @@ EXTERNAL_LIBRARY_GPL_LIST="
>
>  EXTERNAL_LIBRARY_NONFREE_LIST="
>      decklink
> +    ndi
>      libfdk_aac
>      openssl
>  "
> @@ -3012,6 +3014,10 @@ decklink_indev_deps="decklink threads"
>  decklink_indev_extralibs="-lstdc++"
>  decklink_outdev_deps="decklink threads"
>  decklink_outdev_extralibs="-lstdc++"
> +ndi_indev_deps="ndi pthreads"

you only need threads not pthreads.

> +ndi_indev_extralibs="-lndi"
> +ndi_outdev_deps="ndi pthreads"

as far as I see you don't need any threading here.

> +ndi_outdev_extralibs="-lndi"
>  dshow_indev_deps="IBaseFilter"
>  dshow_indev_extralibs="-lpsapi -lole32 -lstrmiids -luuid -loleaut32 -lshlwapi"
>  dv1394_indev_deps="dv1394"
> @@ -5564,6 +5570,8 @@ avisynth_demuxer_extralibs='$ldl'
>  cuda_extralibs='$ldl'
>  decklink_outdev_extralibs="$decklink_outdev_extralibs $ldl"
>  decklink_indev_extralibs="$decklink_indev_extralibs $ldl"
> +ndi_outdev_extralibs="$ndi_outdev_extralibs $ldl"
> +ndi_indev_extralibs="$ndi_indev_extralibs $ldl"
>  frei0r_filter_extralibs='$ldl'
>  frei0r_src_filter_extralibs='$ldl'
>  ladspa_filter_extralibs='$ldl'
> @@ -5822,6 +5830,7 @@ enabled coreimage_filter  && { check_header_objcc QuartzCore/CoreImage.h || disa
>  enabled coreimagesrc_filter && { check_header_objcc QuartzCore/CoreImage.h || disable coreimagesrc_filter; }
>  enabled decklink          && { { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; } &&
>                                 { check_cpp_condition DeckLinkAPIVersion.h "BLACKMAGIC_DECKLINK_API_VERSION >= 0x0a060100" || die "ERROR: Decklink API version must be >= 10.6.1."; } }
> +enabled ndi               && { check_header Processing.NDI.Lib.h || die "ERROR: Processing.NDI.Lib.h header not found"; }
>  enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
>  enabled gmp               && require gmp gmp.h mpz_export -lgmp
>  enabled gnutls            && require_pkg_config gnutls gnutls/gnutls.h gnutls_global_init
> diff --git a/doc/indevs.texi b/doc/indevs.texi
> index 09e3321..7e9a5f1 100644
> --- a/doc/indevs.texi
> +++ b/doc/indevs.texi
> @@ -327,6 +327,60 @@ ffmpeg -channels 16 -format_code Hi50 -f decklink -i 'UltraStudio Mini Recorder'
>
>  @end itemize
> 
> +@section ndi
> +
> +The ndi input device provides capture capabilities for using NDI (Network
> +Device Interface, standard created by NewTek).
> +
> +To enable this input device, you need the NDI SDK and you
> +need to configure with the appropriate @code{--extra-cflags}
> +and @code{--extra-ldflags}.
> +
> +@subsection Options
> +
> +@table @option
> +
> +@item find_sources

Maybe list_devices would be a better name; it would be more consistent with the other devices.

> +If set to @option{true}, print a list of found/available NDI sources and exit.
> +Defaults to @option{false}.
> +
> +@item wait_sources
> +Override time to wait until the number of online sources have changed (ms).
> +Defaults to @option{500}.
> +
> +@item allow_video_fields
> +When this flag is @option{false}, all video that you receive will be progressive.
> +Defaults to @option{1}.

Defaults to @option{true}.

> +
> +@item max_frames_probe
> +Maximum frames for probe input format.
> +Defaults to @option{25}.
> +
> +@item reference_level
> +The audio reference level in dB. This specifies how many dB above the
> +reference level (+4dBU) is the full range of 16 bit audio.
> +Defaults to @option{0}.
> +
> +@end table
> +
> +@subsection Examples
> +
> +@itemize
> +
> +@item
> +List input devices:
> +@example
> +ffmpeg -f ndi -find_sources 1 -i foo
> +@end example
> +
> +@item
> +Restream to NDI:
> +@example
> +ffmpeg -f ndi -i "DEV-5.INTERNAL.M1STEREO.TV (NDI_SOURCE_NAME_1)" -acodec copy -vcodec wrapped_avframe -f ndi -y NDI_SOURCE_NAME_2
> +@end example
> +
> +@end itemize
> +
>  @section dshow
>
>  Windows DirectShow input device.
> diff --git a/doc/outdevs.texi b/doc/outdevs.texi
> index df41cc8..7830cc7 100644
> --- a/doc/outdevs.texi
> +++ b/doc/outdevs.texi
> @@ -182,6 +182,42 @@ ffmpeg -i test.avi -f decklink -pix_fmt uyvy422 -s 720x486 -r 24000/1001 'DeckLi
>
>  @end itemize
> 
> +@section ndi
> +
> +The ndi output device provides playback capabilities for using NDI (Network
> +Device Interface, standard created by NewTek).
> +
> +To enable this output device, you need the NDI SDK and you
> +need to configure with the appropriate @code{--extra-cflags}
> +and @code{--extra-ldflags}.
> +
> +NDI is very picky about the formats it supports. Pixel format is always
> +uyvy422, framerate, field order and video size must be determined for your
> +device with @command{-list_formats 1}. Audio sample rate is always 48 kHz.
> +
> +@subsection Options
> +
> +@table @option
> +
> +@item reference_level
> +The audio reference level in dB. This specifies how many dB above the
> +reference level (+4dBU) is the full range of 16 bit audio.
> +Defaults to @option{0}.
> +
> +@end table
> +
> +@subsection Examples
> +
> +@itemize
> +
> +@item
> +Play video clip:
> +@example
> +ffmpeg -i "udp://@239.1.1.1:10480?fifo_size=1000000&overrun_nonfatal=1" -vf "scale=720:576,fps=fps=25,setdar=dar=16/9,format=pix_fmts=uyvy422" -vcodec wrapped_avframe -f ndi NEW_NDI1
> +@end example
> +
> +@end itemize
> +
>  @section fbdev
>
>  Linux framebuffer output device.
> diff --git a/libavdevice/Makefile b/libavdevice/Makefile
> index 1d4e9e6..a7e0a18 100644
> --- a/libavdevice/Makefile
> +++ b/libavdevice/Makefile
> @@ -19,6 +19,8 @@ OBJS-$(CONFIG_BKTR_INDEV)                += bktr.o
>  OBJS-$(CONFIG_CACA_OUTDEV)               += caca.o
>  OBJS-$(CONFIG_DECKLINK_OUTDEV)           += decklink_enc.o decklink_enc_c.o decklink_common.o
>  OBJS-$(CONFIG_DECKLINK_INDEV)            += decklink_dec.o decklink_dec_c.o decklink_common.o
> +OBJS-$(CONFIG_NDI_OUTDEV)                += ndi_enc.o
> +OBJS-$(CONFIG_NDI_INDEV)                 += ndi_dec.o
>  OBJS-$(CONFIG_DSHOW_INDEV)               += dshow_crossbar.o dshow.o dshow_enummediatypes.o \
>                                              dshow_enumpins.o dshow_filter.o \
>                                              dshow_pin.o dshow_common.o
> diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c
> index a8ed53a..bbea836 100644
> --- a/libavdevice/alldevices.c
> +++ b/libavdevice/alldevices.c
> @@ -46,6 +46,7 @@ static void register_all(void)
>      REGISTER_INDEV   (BKTR,             bktr);
>      REGISTER_OUTDEV  (CACA,             caca);
>      REGISTER_INOUTDEV(DECKLINK,         decklink);
> +    REGISTER_INOUTDEV(NDI,              ndi);
>      REGISTER_INDEV   (DSHOW,            dshow);
>      REGISTER_INDEV   (DV1394,           dv1394);
>      REGISTER_INOUTDEV(FBDEV,            fbdev);
> diff --git a/libavdevice/ndi_dec.c b/libavdevice/ndi_dec.c
> new file mode 100644
> index 0000000..7454ca8
> --- /dev/null
> +++ b/libavdevice/ndi_dec.c
> @@ -0,0 +1,499 @@
> +/*
> + * NDI input
> + * Copyright (c) 2017 Maksym Veremeyenko
> + *
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include "libavformat/avformat.h"
> +#include "libavformat/internal.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/imgutils.h"
> +
> +#include <pthread.h>
> +//#include <semaphore.h>
> +
> +#include <Processing.NDI.Lib.h>
> +
> +typedef struct AVPacketQueue {
> +    AVPacketList *first_pkt, *last_pkt;
> +    int nb_packets;
> +    unsigned long long size;
> +    int abort_request;
> +    pthread_mutex_t mutex;
> +    pthread_cond_t cond;
> +    AVFormatContext *avctx;
> +} AVPacketQueue;

I guess the AVPacketQueue is duplicated from decklink_dec. So either
factorize that into a separate file, or consider not using a
queue here at all and get rid of it: according to the SDK, the NDI lib also has
a small (how small?) internal queue, so a few frames of buffering is already
provided by the NDI lib itself, and doing it on your own might not be needed.
If you do this, you can drop the threads dependency here as well.

> +
> +static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q)
> +{
> +    memset(q, 0, sizeof(AVPacketQueue));
> +    pthread_mutex_init(&q->mutex, NULL);
> +    pthread_cond_init(&q->cond, NULL);
> +    q->avctx = avctx;
> +}
> +
> +static void avpacket_queue_flush(AVPacketQueue *q)
> +{
> +    AVPacketList *pkt, *pkt1;
> +
> +    pthread_mutex_lock(&q->mutex);
> +    for (pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
> +        pkt1 = pkt->next;
> +        av_packet_unref(&pkt->pkt);
> +        av_freep(&pkt);
> +    }
> +    q->last_pkt   = NULL;
> +    q->first_pkt  = NULL;
> +    q->nb_packets = 0;
> +    q->size       = 0;
> +    pthread_mutex_unlock(&q->mutex);
> +}
> +
> +static void avpacket_queue_end(AVPacketQueue *q)
> +{
> +    avpacket_queue_flush(q);
> +    pthread_mutex_destroy(&q->mutex);
> +    pthread_cond_destroy(&q->cond);
> +}
> +
> +static unsigned long long avpacket_queue_size(AVPacketQueue *q)
> +{
> +    unsigned long long size;
> +    pthread_mutex_lock(&q->mutex);
> +    size = q->size;
> +    pthread_mutex_unlock(&q->mutex);
> +    return size;
> +}
> +
> +static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
> +{
> +    AVPacketList *pkt1;
> +
> +    // Drop Packet if queue size is > 1GB
> +    if (avpacket_queue_size(q) >  1024 * 1024 * 1024 ) {
> +        av_log(q->avctx, AV_LOG_WARNING,  "%s: input buffer overrun!\n", __FUNCTION__);
> +        return -1;
> +    }
> +    /* duplicate the packet */
> +    if (av_dup_packet(pkt) < 0) {
> +        return -1;
> +    }
> +
> +    pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));
> +    if (!pkt1) {
> +        return -1;
> +    }
> +    pkt1->pkt  = *pkt;
> +    pkt1->next = NULL;
> +
> +    pthread_mutex_lock(&q->mutex);
> +
> +    if (!q->last_pkt) {
> +        q->first_pkt = pkt1;
> +    } else {
> +        q->last_pkt->next = pkt1;
> +    }
> +
> +    q->last_pkt = pkt1;
> +    q->nb_packets++;
> +    q->size += pkt1->pkt.size + sizeof(*pkt1);
> +
> +    pthread_cond_signal(&q->cond);
> +
> +    pthread_mutex_unlock(&q->mutex);
> +    return 0;
> +}
> +
> +static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)
> +{
> +    AVPacketList *pkt1;
> +    int ret;
> +
> +    pthread_mutex_lock(&q->mutex);
> +
> +    for (;; ) {
> +        pkt1 = q->first_pkt;
> +        if (pkt1) {
> +            q->first_pkt = pkt1->next;
> +            if (!q->first_pkt) {
> +                q->last_pkt = NULL;
> +            }
> +            q->nb_packets--;
> +            q->size -= pkt1->pkt.size + sizeof(*pkt1);
> +            *pkt     = pkt1->pkt;
> +            av_free(pkt1);
> +            ret = 1;
> +            break;
> +        } else if (!block) {
> +            ret = 0;
> +            break;
> +        } else {
> +            pthread_cond_wait(&q->cond, &q->mutex);
> +        }
> +    }
> +    pthread_mutex_unlock(&q->mutex);
> +    return ret;
> +}
> +
> +struct ndi_ctx {
> +    const AVClass *cclass;
> +
> +    void *ctx;
> +
> +    /* Options */
> +    int find_sources;
> +    int wait_sources;
> +    int allow_video_fields;
> +    int max_frames_probe;
> +    int reference_level;
> +
> +    /* Runtime */
> +    NDIlib_recv_create_t* recv;
> +    pthread_t reader;
> +    int f_started, f_stop, dropped;
> +
> +    /* Streams */
> +    AVStream *video_st, *audio_st;
> +    AVPacketQueue queue;
> +    AVFormatContext *avctx;
> +    int interlaced;
> +};
> +
> +static int ndi_enqeue_video(struct ndi_ctx *ctx, NDIlib_video_frame_t *v)
> +{
> +    AVPacket pkt;
> +    av_init_packet(&pkt);
> +
> +    pkt.dts = pkt.pts = av_rescale_q(v->timecode, (AVRational){1, 10000000LL}, ctx->video_st->time_base);
> +    pkt.duration = av_rescale_q(1, (AVRational){v->frame_rate_D, v->frame_rate_N}, ctx->video_st->time_base);
> +
> +    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
> +        __FUNCTION__, pkt.dts, pkt.duration, v->timecode);
> +
> +    pkt.flags       |= AV_PKT_FLAG_KEY;
> +    pkt.stream_index = ctx->video_st->index;
> +    pkt.data         = (uint8_t *)v->p_data;
> +    pkt.size         = v->yres * v->line_stride_in_bytes;
> +
> +    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
> +        ++ctx->dropped;
> +    }
> +
> +//    NDIlib_recv_free_video(ctx->recv, v);

why don't you free this here?

> +
> +    return 0;
> +}
> +
> +static int ndi_enqeue_audio(struct ndi_ctx *ctx, NDIlib_audio_frame_t *a)
> +{
> +    AVPacket pkt;
> +    NDIlib_audio_frame_interleaved_16s_t dst;
> +
> +    av_init_packet(&pkt);
> +
> +    pkt.dts = pkt.pts = av_rescale_q(a->timecode, (AVRational){1, 10000000LL}, ctx->audio_st->time_base);
> +    pkt.duration = av_rescale_q(1, (AVRational){a->no_samples, a->sample_rate}, ctx->audio_st->time_base);
> +
> +    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
> +        __FUNCTION__, pkt.dts, pkt.duration, a->timecode);
> +
> +    pkt.size         = 2 * a->no_samples * a->no_channels;
> +    pkt.data         = (uint8_t *)av_mallocz(pkt.size);
> +
> +    pkt.flags       |= AV_PKT_FLAG_KEY;
> +    pkt.stream_index = ctx->audio_st->index;
> +
> +    dst.reference_level = 0;
> +    dst.p_data = (short*)pkt.data;
> +    NDIlib_util_audio_to_interleaved_16s(a, &dst);
> +
> +    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
> +        ++ctx->dropped;
> +    }
> +
> +    NDIlib_recv_free_audio(ctx->recv, a);
> +
> +    return 0;
> +}
> +
> +static void* ndi_reader(void* p)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)p;
> +
> +    while (!ctx->f_stop)
> +    {
> +        NDIlib_video_frame_t v;
> +        NDIlib_audio_frame_t a;
> +        NDIlib_metadata_frame_t m;
> +        NDIlib_frame_type_e t;
> +
> +        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
> +
> +        if (NDIlib_frame_type_video == t)
> +            ndi_enqeue_video(ctx, &v);
> +        else if (NDIlib_frame_type_audio == t)
> +            ndi_enqeue_audio(ctx, &a);
> +        else if (NDIlib_frame_type_metadata == t)
> +            NDIlib_recv_free_metadata(ctx->recv, &m);
> +    };
> +
> +    return NULL;
> +};
> +
> +static int ndi_find_sources(AVFormatContext *avctx, const char* name, NDIlib_source_t *source_to_connect_to)
> +{
> +    unsigned int n, i, j = -1;

j should be a signed int, default to AVERROR(ENODEV), and probably be
renamed to ret.

> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    const NDIlib_source_t* ndi_srcs = NULL;
> +    const NDIlib_find_create_t find_create_desc = { true, NULL };
> +    NDIlib_find_instance_t ndi_find;
> +
> +    ndi_find = NDIlib_find_create2( &find_create_desc );
> +    if (!ndi_find) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_find_create failed.\n");
> +        return AVERROR(EIO);
> +    }
> +
> +    NDIlib_find_wait_for_sources(ndi_find, ctx->wait_sources);
> +    ndi_srcs = NDIlib_find_get_current_sources(ndi_find, &n);
> +
> +    av_log(avctx, AV_LOG_INFO, "Found %d NDI sources:\n", n);
> +    for (i = 0; i < n; i++)
> +    {
> +        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n", ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
> +        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
> +            *source_to_connect_to = ndi_srcs[i];
> +            j = i;
> +        }
> +    }

You might want to list all sources only when the user requested list_sources. If
the user chose a specific source, I don't think there is a good reason to list
all the available sources.

> +
> +    NDIlib_find_destroy(ndi_find);
> +
> +    return j;
> +}
> +
> +static int ndi_read_header(AVFormatContext *avctx)
> +{
> +    int ret, i;
> +    AVStream *st;
> +    NDIlib_recv_create_t recv_create_desc;
> +    const NDIlib_tally_t tally_state = { true, false };
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    ctx->avctx = avctx;
> +
> +    if (!NDIlib_initialize()) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
> +        return AVERROR(EIO);
> +    }
> +
> +    /* Find available sources. */
> +    ret = ndi_find_sources(avctx, avctx->filename, &recv_create_desc.source_to_connect_to);
> +    if (ctx->find_sources) {
> +        return AVERROR_EXIT;
> +    }
> +    if (ret < 0) {
> +        return AVERROR(ENOENT);

if (ret < 0)
     return ret;

> +    }
> +
> +    /* Create receiver description */
> +    recv_create_desc.color_format = NDIlib_recv_color_format_e_UYVY_RGBA;
> +    recv_create_desc.bandwidth = NDIlib_recv_bandwidth_highest;
> +    recv_create_desc.allow_video_fields = ctx->allow_video_fields;
> +
> +    /* Create the receiver */
> +    ctx->recv = NDIlib_recv_create2(&recv_create_desc);
> +    if (!ctx->recv) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_recv_create2 failed.\n");
> +        return AVERROR(EIO);
> +    }
> +
> +    /* Set tally */
> +    NDIlib_recv_set_tally(ctx->recv, &tally_state);
> +
> +    avpacket_queue_init(avctx, &ctx->queue);
> +
> +    /* Probe input */
> +    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++)
> +    {
> +        NDIlib_video_frame_t v;
> +        NDIlib_audio_frame_t a;
> +        NDIlib_metadata_frame_t m;
> +        NDIlib_frame_type_e t;
> +
> +        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
> +
> +        av_log(avctx, AV_LOG_DEBUG, "NDIlib_recv_capture[%d]=%d.\n", i, t);
> +
> +        if (NDIlib_frame_type_video == t) {
> +            if (!ctx->video_st) {
> +
> +                st = avformat_new_stream(avctx, NULL);
> +                if (!st) {
> +                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
> +                    ret = AVERROR(ENOMEM);
> +                    goto error;
> +                }
> +
> +                st->time_base.den               = v.frame_rate_N;
> +                st->time_base.num               = v.frame_rate_D;
> +                av_stream_set_r_frame_rate(st, av_make_q(st->time_base.den, st->time_base.num));
> +                st->codecpar->codec_type        = AVMEDIA_TYPE_VIDEO;
> +                st->codecpar->width             = v.xres;
> +                st->codecpar->height            = v.yres;
> +                st->codecpar->codec_id          = AV_CODEC_ID_RAWVIDEO;
> +                st->codecpar->bit_rate          = av_rescale(v.xres * v.yres * 16, st->time_base.den, st->time_base.num);
> +
> +                if (NDIlib_FourCC_type_UYVY == v.FourCC || NDIlib_FourCC_type_UYVA == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_UYVY422;
> +                    st->codecpar->codec_tag     = MKTAG('U', 'Y', 'V', 'Y');
> +                } else if (NDIlib_FourCC_type_BGRA == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_BGRA;
> +                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', 'A');
> +                } else if (NDIlib_FourCC_type_BGRX == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_BGR0;
> +                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', '0');
> +                } else if (NDIlib_FourCC_type_RGBA == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_RGBA;
> +                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', 'A');
> +                } else if (NDIlib_FourCC_type_RGBX == v.FourCC) {
> +                    st->codecpar->format        = AV_PIX_FMT_RGB0;
> +                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', '0');
> +                } else {
> +                    av_log(avctx, AV_LOG_ERROR, "Unsupported video stream format, v.FourCC=%d\n", v.FourCC);
> +                    ret = AVERROR(ENOTSUP);
> +                    goto error;
> +                }
> +
> +                ctx->interlaced = v.frame_format_type == NDIlib_frame_format_type_interleaved ? 1 : 0;
> +
> +                avpriv_set_pts_info(st, 64, v.frame_rate_D, v.frame_rate_N);
> +
> +                ctx->video_st = st;
> +            }
> +
> +            ndi_enqeue_video(ctx, &v);
> +        } else if (NDIlib_frame_type_audio == t) {
> +            if (!ctx->audio_st) {
> +
> +                st = avformat_new_stream(avctx, NULL);
> +                if (!st) {
> +                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
> +                    ret = AVERROR(ENOMEM);
> +                    goto error;
> +                }
> +
> +                st->codecpar->codec_type        = AVMEDIA_TYPE_AUDIO;
> +                st->codecpar->codec_id          = AV_CODEC_ID_PCM_S16LE;
> +                st->codecpar->sample_rate       = a.sample_rate;
> +                st->codecpar->channels          = a.no_channels;
> +
> +                avpriv_set_pts_info(st, 64, 1, a.sample_rate);  /* 64 bits pts in us */
> +
> +                ctx->audio_st = st;
> +            }
> +
> +            ndi_enqeue_audio(ctx, &a);
> +        } else if (NDIlib_frame_type_metadata == t) {
> +
> +            NDIlib_recv_free_metadata(ctx->recv, &m);
> +        }
> +
> +        if (ctx->video_st && ctx->audio_st)
> +            break;
> +    }
> +
> +    if (ctx->video_st || ctx->audio_st)
> +    {
> +        ctx->f_started = 1;
> +        pthread_create(&ctx->reader, NULL, ndi_reader, ctx);
> +    }

else
     ret = AVERROR(EIO);

> +
> +error:
> +    return ret;
> +}
> +
> +static int ndi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;
> +
> +    avpacket_queue_get(&ctx->queue, pkt, 1);
> +
> +    if (frame && ctx->interlaced) {
> +        frame->interlaced_frame = 1;
> +        frame->top_field_first = 1;
> +    }
> +
> +    return 0;
> +
> +}
> +
> +static int ndi_read_close(AVFormatContext *avctx)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    if (ctx->recv)
> +    {
> +        if (ctx->f_started)
> +        {
> +            ctx->f_stop = 1;
> +            pthread_join(ctx->reader, NULL);
> +        }
> +
> +        ctx->f_started = 0;
> +        ctx->f_stop = 0;
> +
> +        avpacket_queue_end(&ctx->queue);
> +        NDIlib_recv_destroy(ctx->recv);
> +    };
> +
> +    return 0;
> +}
> +
> +#define OFFSET(x) offsetof(struct ndi_ctx, x)
> +#define DEC AV_OPT_FLAG_DECODING_PARAM
> +
> +static const AVOption options[] = {
> +    { "find_sources", "Find available sources"  , OFFSET(find_sources), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, DEC },
> +    { "wait_sources", "Time to wait until the number of online sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },
> +    { "allow_video_fields", "When this flag is FALSE, all video that you receive will be progressive"  , OFFSET(allow_video_fields), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, DEC },
> +    { "max_frames_probe", "Maximum frames for probe"  , OFFSET(max_frames_probe), AV_OPT_TYPE_INT, { .i64 = 25 }, 6, 100, DEC },
> +    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, DEC | AV_OPT_FLAG_AUDIO_PARAM},
> +    { NULL },
> +};
> +
> +static const AVClass ndi_demuxer_class = {
> +    .class_name = "NDI demuxer",
> +    .item_name  = av_default_item_name,
> +    .option     = options,
> +    .version    = LIBAVUTIL_VERSION_INT,
> +    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
> +};
> +
> +AVInputFormat ff_ndi_demuxer = {
> +    .name           = "ndi",
> +    .long_name      = NULL_IF_CONFIG_SMALL("NDI input"),
> +    .flags          = AVFMT_NOFILE,
> +    .priv_class     = &ndi_demuxer_class,
> +    .priv_data_size = sizeof(struct ndi_ctx),
> +    .read_header   = ndi_read_header,
> +    .read_packet   = ndi_read_packet,
> +    .read_close    = ndi_read_close,
> +};
> diff --git a/libavdevice/ndi_enc.c b/libavdevice/ndi_enc.c
> new file mode 100644
> index 0000000..8cbdb9e
> --- /dev/null
> +++ b/libavdevice/ndi_enc.c
> @@ -0,0 +1,289 @@
> +/*
> + * NDI output
> + * Copyright (c) 2017 Maksym Veremeyenko
> + *
> + * This file is part of FFmpeg.
> + *
> + * FFmpeg is free software; you can redistribute it and/or
> + * modify it under the terms of the GNU Lesser General Public
> + * License as published by the Free Software Foundation; either
> + * version 2.1 of the License, or (at your option) any later version.
> + *
> + * FFmpeg is distributed in the hope that it will be useful,
> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
> + * Lesser General Public License for more details.
> + *
> + * You should have received a copy of the GNU Lesser General Public
> + * License along with FFmpeg; if not, write to the Free Software
> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> + */
> +
> +#include "libavformat/avformat.h"
> +#include "libavformat/internal.h"
> +#include "libavutil/opt.h"
> +#include "libavutil/imgutils.h"
> +
> +#include <Processing.NDI.Lib.h>
> +
> +struct ndi_ctx {
> +    const AVClass *cclass;
> +
> +    void *ctx;
> +
> +    /* Options */
> +    int reference_level;
> +
> +    /* Streams present */
> +    NDIlib_video_frame_t* video;
> +    NDIlib_audio_frame_interleaved_16s_t* audio;
> +
> +    /* Status */
> +    int64_t last_pts;
> +
> +    NDIlib_send_instance_t ndi_send;
> +    AVFrame *last_avframe;
> +};
> +
> +static int ndi_write_trailer(AVFormatContext *avctx)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    if (ctx->ndi_send)
> +    {
> +        if (ctx->last_avframe) {
> +            NDIlib_send_send_video_async(ctx->ndi_send, NULL);
> +            av_frame_free(&ctx->last_avframe);
> +        }
> +
> +        NDIlib_send_destroy(ctx->ndi_send);
> +    }
> +
> +    if (ctx->video)
> +        av_freep(&ctx->video);
> +
> +    if (ctx->audio)
> +        av_freep(&ctx->audio);
> +
> +    return 0;
> +}
> +
> +static int ndi_write_video_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVFrame *avframe, *tmp = (AVFrame *)pkt->data;
> +
> +    if (tmp->format != AV_PIX_FMT_UYVY422 ||
> +        tmp->width  != ctx->video->xres ||
> +        tmp->height != ctx->video->yres) {
> +        av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
> +        av_log(avctx, AV_LOG_ERROR, "tmp->width=%d, tmp->height=%d, ctx->video->xres=%d, ctx->video->yres=%d\n", tmp->width, tmp->height, ctx->video->xres, ctx->video->yres);
> +        return AVERROR(EINVAL);
> +    }
> +    avframe = av_frame_clone(tmp);
> +    if (!avframe) {
> +        av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
> +        return AVERROR(EIO);
> +    }
> +
> +    ctx->video->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000LL});
> +
> +    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
> +        ? -avframe->linesize[0]
> +        : avframe->linesize[0];
> +    ctx->video->p_data = (avframe->linesize[0] < 0)
> +        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
> +        : (void *)(avframe->data[0]);
> +
> +    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
> +        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
> +
> +    NDIlib_send_send_video_async(ctx->ndi_send, ctx->video);
> +
> +    if (ctx->last_avframe)
> +        av_frame_free(&ctx->last_avframe);
> +    ctx->last_avframe = avframe;
> +
> +    return 0;
> +}
> +
> +static int ndi_write_audio_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +
> +    ctx->audio->p_data = (void*)pkt->data;
> +    ctx->audio->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000});
> +    ctx->audio->no_samples = pkt->size / (ctx->audio->no_channels << 1);
> +
> +    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
> +        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
> +
> +    NDIlib_util_send_send_audio_interleaved_16s(ctx->ndi_send, ctx->audio);
> +
> +    return 0;
> +}
> +
> +static int ndi_write_packet(AVFormatContext *avctx, AVPacket *pkt)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVStream *st = avctx->streams[pkt->stream_index];
> +
> +    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
> +
> +    if      (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
> +        return ndi_write_video_packet(avctx, st, pkt);
> +    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
> +        return ndi_write_audio_packet(avctx, st, pkt);
> +
> +    return AVERROR(EIO);
> +}
> +
> +static int ndi_setup_audio(AVFormatContext *avctx, AVStream *st)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVCodecParameters *c = st->codecpar;
> +
> +    if (ctx->audio) {
> +        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
> +        return -1;
> +    }
> +    if (c->sample_rate != 48000) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
> +               " Only 48kHz is supported.\n");
> +        return -1;
> +    }
> +    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
> +               " Only stereo and 7.1 are supported.\n");
> +        return -1;
> +    }
> +
> +    ctx->audio = (NDIlib_audio_frame_interleaved_16s_t *) av_mallocz(sizeof(NDIlib_audio_frame_interleaved_16s_t));
> +
> +    ctx->audio->sample_rate = c->sample_rate;
> +    ctx->audio->no_channels = c->channels;
> +    ctx->audio->reference_level = ctx->reference_level;
> +
> +    /* The device expects the sample rate to be fixed. */
> +    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
> +
> +    return 0;
> +}
> +
> +static int ndi_setup_video(AVFormatContext *avctx, AVStream *st)
> +{
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    AVCodecParameters *c = st->codecpar;
> +    AVRational display_aspect_ratio;
> +
> +    if (ctx->video) {
> +        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
> +        return AVERROR(ENOTSUP);
> +    }
> +
> +    if (c->codec_id != AV_CODEC_ID_WRAPPED_AVFRAME) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported codec format!"
> +               " Only AV_CODEC_ID_WRAPPED_AVFRAME is supported (-vcodec wrapped_avframe).\n");
> +        return AVERROR(ENOTSUP);
> +    }
> +
> +    if (c->format != AV_PIX_FMT_UYVY422) {
> +        av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
> +               " Only AV_PIX_FMT_UYVY422 is supported.\n");
> +        return AVERROR(ENOTSUP);
> +    }
> +
> +    ctx->video = (NDIlib_video_frame_t *) av_mallocz(sizeof(NDIlib_video_frame_t));
> +
> +    ctx->video->FourCC = NDIlib_FourCC_type_UYVY;
> +    ctx->video->xres = c->width;
> +    ctx->video->yres = c->height;
> +    ctx->video->frame_rate_N = st->time_base.den;
> +    ctx->video->frame_rate_D = st->time_base.num;
> +    ctx->video->frame_format_type = AV_FIELD_PROGRESSIVE == c->field_order
> +        ? NDIlib_frame_format_type_progressive
> +        : NDIlib_frame_format_type_interleaved;
> +
> +    if (st->sample_aspect_ratio.num &&
> +        av_cmp_q(st->sample_aspect_ratio, st->codecpar->sample_aspect_ratio)) {
> +        av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
> +                  st->codecpar->width  * (int64_t)st->sample_aspect_ratio.num,
> +                  st->codecpar->height * (int64_t)st->sample_aspect_ratio.den,
> +                  1024 * 1024);
> +        ctx->video->picture_aspect_ratio = av_q2d(display_aspect_ratio);
> +    }
> +    else
> +        ctx->video->picture_aspect_ratio = 16.0 / 9.0;

Shouldn't this be (double)width/height? Assume square pixel aspect ratio by default, no?

> +
> +    /* The device expects the framerate to be fixed. */
> +    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
> +
> +    return 0;
> +}
> +
> +static int ndi_write_header(AVFormatContext *avctx)
> +{
> +    int ret = 0;

You either set ret to AVERROR_EXTERNAL here or set it below for each case where
a failure happens

> +    unsigned int n;
> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
> +    const NDIlib_send_create_t ndi_send_desc = {avctx->filename, NULL, true, false};
> +
> +    /* check if streams compatible */
> +    for (n = 0; n < avctx->nb_streams; n++) {
> +        AVStream *st = avctx->streams[n];
> +        AVCodecParameters *c = st->codecpar;
> +        if        (c->codec_type == AVMEDIA_TYPE_AUDIO) {
> +            if ((ret = ndi_setup_audio(avctx, st)))
> +                goto error;
> +        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
> +            if ((ret = ndi_setup_video(avctx, st)))
> +                goto error;
> +        } else {
> +            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
> +            ret = AVERROR(EIO);
> +            goto error;
> +        }
> +    }
> +
> +    if (!NDIlib_initialize()) {
> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");

here

> +    } else {
> +        ctx->ndi_send = NDIlib_send_create(&ndi_send_desc);
> +        if (!ctx->ndi_send)
> +            av_log(avctx, AV_LOG_ERROR, "Failed to create NDI output %s\n", avctx->filename);

or here.

> +        else
> +            ret = 0;
> +    }
> +
> +error:
> +    return ret;
> +}
> +
> +#define OFFSET(x) offsetof(struct ndi_ctx, x)
> +static const AVOption options[] = {
> +
> +    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_AUDIO_PARAM},
> +    { NULL },
> +};
> +
> +static const AVClass ndi_muxer_class = {
> +    .class_name = "NDI muxer",
> +    .item_name  = av_default_item_name,
> +    .option     = options,
> +    .version    = LIBAVUTIL_VERSION_INT,
> +    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT,
> +};
> +
> +AVOutputFormat ff_ndi_muxer = {
> +    .name           = "ndi",
> +    .long_name      = NULL_IF_CONFIG_SMALL("NDI output"),
> +    .audio_codec    = AV_CODEC_ID_PCM_S16LE,
> +    .video_codec    = AV_CODEC_ID_WRAPPED_AVFRAME,
> +    .subtitle_codec = AV_CODEC_ID_NONE,
> +    .flags          = AVFMT_NOFILE,
> +    .priv_class     = &ndi_muxer_class,
> +    .priv_data_size = sizeof(struct ndi_ctx),
> +    .write_header   = ndi_write_header,
> +    .write_packet   = ndi_write_packet,
> +    .write_trailer  = ndi_write_trailer,
> +};
> -- 
> 1.8.3.1
>

Thanks,
Marton
Nicolas George July 23, 2017, 11:55 a.m. UTC | #4
Le quintidi 5 thermidor, an CCXXV, Marton Balint a écrit :
> >+    if (ctx->video_st || ctx->audio_st)
> >+    {
> >+        ctx->f_started = 1;
> >+        pthread_create(&ctx->reader, NULL, ndi_reader, ctx);
> >+    }
> 
> else
>     ret = AVERROR(EIO);

EINVAL, I think.

Regards,
Marton Balint July 23, 2017, 12:43 p.m. UTC | #5
On Sun, 23 Jul 2017, Nicolas George wrote:

[..]

>> +    int abort_request;
>> +    pthread_mutex_t mutex;
>> +    pthread_cond_t cond;
>> +    AVFormatContext *avctx;
>> +} AVPacketQueue;
>
> This whole API looks duplicated in the encoder. Not acceptable. You need
> to move it into a pair of .c/.h files, with the "ff_" prefix for the
> function names (the structure name can stay AV, since it is not
> exported).
>
> But I think it would be better if you tried to use AVThreadMessageQueue
> instead. You can extend it if necessary.

That is copied from decklink_dec as far as I see. Maybe better to 
factorize first, and move it to AVThreadMessageQueue as a second step?



>> +    pkt1 = (AVPacketList *)av_malloc(sizeof(AVPacketList));
>
> Drop the cast, it is useless in C (this is not c++) and harmful (it can
> hide warnings).

I guess some of the casts are there because decklink_dec was c++ code...



>> +    /* Probe input */
>> +    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++)
>
> This is already implemented, in a more generic and uniform way, in
> libavformat. Drop the loop and move the format-probing code into
> ndi_read_packet().

Maybe it's just me, but I am not sure what kind of probing you are 
referring to. Could you explain in a bit more detail how the changed code 
should work?



>> +    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;
>
> This line produces a warning, does it not? Anyway, you are not supposed
> to use st->codec any longer.

I guess that is copied from decklink as well. As far as I see, the correct 
replacement is to set codecpar->field_order in read_header, and that is 
it, right?



>> +    { "wait_sources", "Time to wait until the number of online sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },
>
> AV_OPT_TYPE_DURATION

Note: duration is in microseconds, not milliseconds, so the code/docs need 
updating as well.


>> +    }
>> +    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
>> +        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
>> +               " Only stereo and 7.1 are supported.\n");
>
> Check channel layout too.

I think it is better if it works with unkown channel layouts as well, as 
long as the number of channels are supported.



>> +    if (ctx->video) {
>> +        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
>
>> +        return AVERROR(ENOTSUP);
>
> I suspect this library exists also for Windows and Macos, so you cannot
> use ENOTSUP. EINVAL.

Or maybe ENOSYS?


Regards,
Marton
Reimar Döffinger July 23, 2017, 12:48 p.m. UTC | #6
On 23.07.2017, at 12:20, Nicolas George <george@nsup.org> wrote:

> 
>> +    if (!q->last_pkt) {
>> +        q->first_pkt = pkt1;
>> +    } else {
>> +        q->last_pkt->next = pkt1;
>> +    }


If kept, I would invert the condition, otherwise the else is essentially a double-negation.

>> +    q->last_pkt = pkt1;
> 
> There is a little known but nice simplification when implementing
> linked-lists like that: instead of "last_pkt", have in the structure
> "AVPacketList **tail", and you can write this whole block without a
> condition:
> 
>    *q->tail = pkt1;
>    q->tail = &(*q->tail)->next;

It's without condition and an optimization, I am not sure it quite qualifies as "simplification" though.
Nicolas George July 23, 2017, 3:07 p.m. UTC | #7
Le quintidi 5 thermidor, an CCXXV, Marton Balint a écrit :
> That is copied from decklink_dec as far as I see. Maybe better to factorize
> first, and move it to AVThreadMessageQueue as a second step?

I had not noticed it was copied from decklink_dec. If so, then it must
absolutely be shared between this decklink_dec and NDI.

And indeed, it would probably be better to change decklink_dec to use
AVThreadMessageQueue, but it is harder for new code.

> Maybe it's just me, but I am not sure what kind of probing are you referring
> to. Could you explain in a bit more detail how the changed code should work?

The same way it works for formats without global header, for example
MPEG-PS: read_header() does not create any stream; read_packet() creates
the stream when it sees a packet for one that does not exist yet. And
libavformat takes care of the rest. I think it requires
AVFMTCTX_NOHEADER.

> I guess that is copied from decklink as well. As far as I see, the correct
> replacement is to set codecpar->field_order in read_header, and that is it,
> right?

I think so.

> I think it is better if it works with unkown channel layouts as well, as
> long as the number of channels are supported.

I do not agree, and especially not in the device itself. There are many
small issues where some component invents a default layout when only a
count is provided, and then we are unable to propagate a count without a
layout.

> >>+        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
> >>+        return AVERROR(ENOTSUP);
> >I suspect this library exists also for Windows and Macos, so you cannot
> >use ENOTSUP. EINVAL.
> Or maybe ENOSYS?

ENOTSUP -> "Operation not supported" / "Not supported"
ENOSYS  -> "Function not implemented" / "Function not supported"
EINVAL  -> "Invalid argument"

In this case, the problem is that the avctx argument has several
streams: EINVAL is really the correct one.

Regards,
Maksym Veremeyenko July 23, 2017, 7:43 p.m. UTC | #8
On 23.07.2017 13:20, Nicolas George wrote:
> Le quintidi 5 thermidor, an CCXXV, Maksym Veremeyenko a écrit :
[...]
>> Subject: [PATCH] Implement NewTek NDI support
>
> Nit: "lavf: implement..."
>

would it be ok:

    Subject: [PATCH] lavf: implement NewTek NDI support

[...]
> I would prefer libndi_newteck, or maybe just libndi. Because if this
> protocol is successful, in one or two years there may be GSoC student
> implementing it natively for FFmpeg, and it would get the short name.
>

would you prefer *newtek_ndi* or *libndi_newtek*?

> Could they be convinced to release their code with an usable licence?
>
no answers currently, i will clarify.

[...]
>> diff --git a/doc/indevs.texi b/doc/indevs.texi
>> index 09e3321..7e9a5f1 100644
>> --- a/doc/indevs.texi
>> +++ b/doc/indevs.texi
>> @@ -327,6 +327,60 @@ ffmpeg -channels 16 -format_code Hi50 -f decklink -i 'UltraStudio Mini Recorder'
>>
>>  @end itemize
>>
>> +@section ndi
>> +
>> +The ndi input device provides capture capabilities for using NDI (Network
>> +Device Interface, standard created by NewTek).
>> +
>
> Please add a few words about the syntax for the input "filename".
>

the input filename is an /NDI source name/ that can be found by passing 
-find_sources 1 on the command line - it has no specific syntax but is 
human-readable formatted.

>> +To enable this input device, you need the NDI SDK and you
>> +need to configure with the appropriate @code{--extra-cflags}
>> +and @code{--extra-ldflags}.
>
> Could they be convinced to provide a pkg-config file?
>
for now only libs and headers (examples also) files provided.

[...]
>> +@example
>
>> +ffmpeg -f ndi -find_sources 1 -i foo
>
> "foo" is not very elegant; "dummy"?
>
>> +@end example
>> +
>> +@item
>> +Restream to NDI:
>> +@example
>
>> +ffmpeg -f ndi -i "DEV-5.INTERNAL.M1STEREO.TV (NDI_SOURCE_NAME_1)" -acodec copy -vcodec wrapped_avframe -f ndi -y NDI_SOURCE_NAME_2
>
> Nowadays, I think we prefer "-c:a" and "-c:v".
>
ok

> Is -vcodec wrapped_avframe really necessary? It should be automatic.
>
yes, i can remove that if you prefer.

>> +#include <pthread.h>
>
>> +//#include <semaphore.h>
>
> To remove before applying.
>
i will


>> +struct ndi_ctx {
>
> Nit: NDIContext for consistency.
>
ok

>> +    const AVClass *cclass;
>> +
>
>> +    void *ctx;
>
> Looks unused.
>
it used for av_log in reader thread

>> +
>> +    /* Options */
>> +    int find_sources;
>> +    int wait_sources;
>> +    int allow_video_fields;
>> +    int max_frames_probe;
>> +    int reference_level;
>> +
>> +    /* Runtime */
>
>> +    NDIlib_recv_create_t* recv;
>
> Here and everywhere: nit: the pointer mark * belongs with the variable
> or field, not with the type.
>
will be fixed


>> +    pthread_t reader;
>
>> +    int f_started, f_stop, dropped;
>
> "f_"? What does it mean?
>
flag
bad prefix?

>> +
>> +    /* Streams */
>> +    AVStream *video_st, *audio_st;
>> +    AVPacketQueue queue;
>
>> +    AVFormatContext *avctx;
>
> All the functions are already called with avctx as a parameter, no need
> for a back pointer.
>
as mentioned above, it used in a thread

>> +    int interlaced;
>> +};
>> +
>> +static int ndi_enqeue_video(struct ndi_ctx *ctx, NDIlib_video_frame_t *v)
>> +{
>> +    AVPacket pkt;
>> +    av_init_packet(&pkt);
>> +
>
>> +    pkt.dts = pkt.pts = av_rescale_q(v->timecode, (AVRational){1, 10000000LL}, ctx->video_st->time_base);
>
> The remark about not using "long" applies to qualified integers, of
> course.
>
> Nit: #define NDI_TIMEBASE and use it everywhere.
>
ok

>> +    pkt.duration = av_rescale_q(1, (AVRational){v->frame_rate_D, v->frame_rate_N}, ctx->video_st->time_base);
>
> Unless I am mistaken, video_st->time_base is set to
> (AVRational){v->frame_rate_D, v->frame_rate_N}. Therefore, I suggest to
> drop this av_rescale_q() and add an av_assert1() instead (be sure to
> always use --assert-level=2 when developing).
>
i still have some doubts about stream time_base used, i think it should 
be 1/10000000 like their timecode value, rather then framerate, because 
if source will provide variable frame rate it would be hard to compute 
real frame duration

>> +
>> +    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
>
>> +        __FUNCTION__, pkt.dts, pkt.duration, v->timecode);
>
> __FUNCTION__ is not standard, but used elsewhere in the code; __func__
> is more standard and also used in the code. But printing the function
> name in debug messages is not usually done in the code. Better just make
> sure that the message is easily greppable.
>
> Same below of course.
>
i can use __func__ but if you prefer, i will drop this.


[...]
>> +//    NDIlib_recv_free_video(ctx->recv, v);
>
> Cleanup?
>
i wanted to avoid double copying of the video frame data and use 
av_free_packet's call to free the packet data, but as i see now, the frame 
data is allocated in the SDK's code by the new operator

>> +static void* ndi_reader(void* p)
>> +{
>
>> +    struct ndi_ctx *ctx = (struct ndi_ctx *)p;
>
> Unnecessary (and thus harmful) cast.
>
ok

>> +
>
>> +    while (!ctx->f_stop)
>> +    {
>
> Nit: braces on the same line. Same below.
>
ok

>> +        NDIlib_video_frame_t v;
>> +        NDIlib_audio_frame_t a;
>> +        NDIlib_metadata_frame_t m;
>> +        NDIlib_frame_type_e t;
>> +
>> +        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
>> +
>
>> +        if (NDIlib_frame_type_video == t)
>
> The test looks strange in that direction.
>
why? that approach could save from mistypes like

     if (t = NDIlib_frame_type_video)

>> +            ndi_enqeue_video(ctx, &v);
>> +        else if (NDIlib_frame_type_audio == t)
>> +            ndi_enqeue_audio(ctx, &a);
>> +        else if (NDIlib_frame_type_metadata == t)
>> +            NDIlib_recv_free_metadata(ctx->recv, &m);
>
> Is there a risk of leak if a new type of packet is invented? Looks like
> a bad API design by NewTek.
>
the API is in a development stage and not final - we can expect everything

[...]
>> +    for (i = 0; i < n; i++)
>> +    {
>> +        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n", ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
>> +        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
>> +            *source_to_connect_to = ndi_srcs[i];
>
>> +            j = i;
>
> break?
>
yes

>> +        }
>> +    }
>> +
>> +    NDIlib_find_destroy(ndi_find);
>> +
>
>> +    return j;
>
> j must be unsigned, then.
>
negative j mean negative result of find

>> +    if (!NDIlib_initialize()) {
>> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
>
>> +        return AVERROR(EIO);
>
> AVERROR_EXTERNAL (unless the library provides error codes that could be
> translated).
>
ok

>> +    }
>> +
>> +    /* Find available sources. */
>
>> +    ret = ndi_find_sources(avctx, avctx->filename, &recv_create_desc.source_to_connect_to);
>> +    if (ctx->find_sources) {
>
> The find_sources option looks unnecessary if the sources are always
> printed and it only causes an error.
>
it is required to exit from find sources request like show_devices in a 
decklink module

>> +    /* Probe input */
>> +    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++)
>
> This is already implemented, in a more generic and uniform way, in
> libavformat. Drop the loop and move the format-probing code into
> ndi_read_packet().
>
could you give me a hint/examples?

[...]
>> +                if (NDIlib_FourCC_type_UYVY == v.FourCC || NDIlib_FourCC_type_UYVA == v.FourCC) {
>> +                    st->codecpar->format        = AV_PIX_FMT_UYVY422;
>
> Looks strange: the A in UYVA means alpha, does it not? What happens to
> the alpha channel?
>

according to documentation:
[...]
	// This is a UYVY buffer followed immediately by an alpha channel buffer.
	// If the stride of the YCbCr component is "stride", then the alpha channel
	// starts at image_ptr + yres*stride. The alpha channel stride is stride/2.
	NDIlib_FourCC_type_UYVA = NDI_LIB_FOURCC('U', 'Y', 'V', 'A')
[...]
currently alpha channel is ignored


>> +        pthread_create(&ctx->reader, NULL, ndi_reader, ctx);
>
> Missing error check.
>
ok

>> +    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;
>
> This line produces a warning, does it not? Anyway, you are not supposed
> to use st->codec any longer.
>
i blindly copied further code from the decklink module that sets the 
interlaced flag

[...]
>> +            ctx->f_stop = 1;
>
> This needs to be protected by a memory barrier, here and in the thread.
>
the thread is reading, the main app is writing only... but if you give me an 
example from ffmpeg's code, or a more appropriate approach for notifying 
the thread to exit, i will reimplement it


>> +    { "find_sources", "Find available sources"  , OFFSET(find_sources), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, DEC },
>
> AV_OPT_TYPE_BOOL
>
OK

>> +    { "wait_sources", "Time to wait until the number of online sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },
>
> AV_OPT_TYPE_DURATION
>
OK

>> +    { "allow_video_fields", "When this flag is FALSE, all video that you receive will be progressive"  , OFFSET(allow_video_fields), AV_OPT_TYPE_INT, { .i64 = 1 }, 0, 1, DEC },
>
> AV_OPT_TYPE_BOOL
>
OK


>> +AVInputFormat ff_ndi_demuxer = {
>> +    .name           = "ndi",
>
>> +    .long_name      = NULL_IF_CONFIG_SMALL("NDI input"),
>
> "Network Device Interface (NDI) input using NewTek library"
>
ok


>> +    avframe = av_frame_clone(tmp);
>> +    if (!avframe) {
>> +        av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
>
>> +        return AVERROR(EIO);
>
> AVERROR(ENOMEM)
>
ok

>> +    }
>> +
>> +    ctx->video->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000LL});
>> +
>
>> +    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
>> +        ? -avframe->linesize[0]
>> +        : avframe->linesize[0];
>
> abs()?
>
copied from decklink code

>> +    ctx->video->p_data = (avframe->linesize[0] < 0)
>> +        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
>> +        : (void *)(avframe->data[0]);
>
> Did you test with negative linesize? It looks like it will flip the
> video.
>
i did not test it, but I left it in place for further processing

>> +    NDIlib_send_send_video_async(ctx->ndi_send, ctx->video);
>> +
>> +    if (ctx->last_avframe)
>> +        av_frame_free(&ctx->last_avframe);
>> +    ctx->last_avframe = avframe;
>
> This looks very strange. Can you explain?
>
> It looks to me like NDIlib_send_send_video_async() is only asynchronous
> for one frame, but will block if a second frame is given before the
> first one has been sent. Is that it? If so, a comment would be nice.
>
that exact behavior it has, i will add a notice/comment about that


>> +    if      (st->codecpar->codec_type == AVMEDIA_TYPE_VIDEO)
>> +        return ndi_write_video_packet(avctx, st, pkt);
>> +    else if (st->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
>> +        return ndi_write_audio_packet(avctx, st, pkt);
>> +
>
>> +    return AVERROR(EIO);
>
> AVERROR_BUG
>
ok

>> +}
>> +
>> +static int ndi_setup_audio(AVFormatContext *avctx, AVStream *st)
>> +{
>> +    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
>> +    AVCodecParameters *c = st->codecpar;
>> +
>> +    if (ctx->audio) {
>> +        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
>
>> +        return -1;
>
> AVERROR(EINVAL)
ok

>> +    }
>> +    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
>> +        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
>> +               " Only stereo and 7.1 are supported.\n");
>
> Check channel layout too.
>
i will drop this check at all

>> +    ctx->audio = (NDIlib_audio_frame_interleaved_16s_t *) av_mallocz(sizeof(NDIlib_audio_frame_interleaved_16s_t));
>
> Unnecessary (and thus harmful) cast.
>
> And missing failure check.
>
ok


>> +    if (ctx->video) {
>> +        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
>
>> +        return AVERROR(ENOTSUP);
>
> I suspect this library exists also for Windows and Macos, so you cannot
> use ENOTSUP. EINVAL.
>
ok


>> +
>> +    if (!NDIlib_initialize()) {
>> +        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
>
> ret = AVERROR_EXTERNAL;
>
>> +    } else {
>> +        ctx->ndi_send = NDIlib_send_create(&ndi_send_desc);
>
>> +        if (!ctx->ndi_send)
>> +            av_log(avctx, AV_LOG_ERROR, "Failed to create NDI output %s\n", avctx->filename);
>
> ret = AVERROR_EXTERNAL;
>
>> +        else
>> +            ret = 0;
>
> ret is already 0.
i will check
Maksym Veremeyenko July 23, 2017, 7:45 p.m. UTC | #9
On 23.07.2017 15:43, Marton Balint wrote:
[...]
>> But I think it would be better if you tried to use AVThreadMessageQueue
>> instead. You can extend it if necessary.
>
> That is copied from decklink_dec as far as I see. Maybe better to
> factorize first, and move it to AVThreadMessageQueue as a second step?
exactly


>>> +    { "wait_sources", "Time to wait until the number of online
>>> sources have changed (ms)"  , OFFSET(wait_sources), AV_OPT_TYPE_INT,
>>> { .i64 = 500 }, 100, 10000, DEC },
>>
>> AV_OPT_TYPE_DURATION
>
> Note: duration is microsec, not milisec, so code/docs needs updating as
> well.
ok

>
>
>>> +    }
>>> +    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
>>> +        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
>>> +               " Only stereo and 7.1 are supported.\n");
>>
>> Check channel layout too.
>
> I think it is better if it works with unkown channel layouts as well, as
> long as the number of channels are supported.
ok
Maksym Veremeyenko July 23, 2017, 7:52 p.m. UTC | #10
On 23.07.2017 18:07, Nicolas George wrote:

[...]
>> Maybe it's just me, but I am not sure what kind of probing are you referring
>> to. Could you explain in a bit more detail how the changed code should work?
>
> The same way it works for formats without global header, for example
> MPEG-PS: read_header() does not create any stream; read_packet() creates
> the stream when it sees a packet for one that does not exist yet. And
> libavformat takes care of the rest. I think it requires
> AVFMTCTX_NOHEADER.
>
absolutely

>> I guess that is copied from decklink as well. As far as I see, the correct
>> replacement is to set codecpar->field_order in read_header, and that is it,
>> right?
>
> I think so.
>
ok

>> I think it is better if it works with unkown channel layouts as well, as
>> long as the number of channels are supported.
>
> I do not agree, and especially not in the device itself. There are many
> small issues where some component invents a default layout when only a
> count is provided, and then we are unable to propagate a count without a
> layout.
>

i seen no hard limitation about audio channel numbers in SDK, so let's 
leave it for future

>>>> +        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
>>>> +        return AVERROR(ENOTSUP);
>>> I suspect this library exists also for Windows and Macos, so you cannot
>>> use ENOTSUP. EINVAL.
>> Or maybe ENOSYS?
>
> ENOTSUP -> "Operation not supported" / "Not supported"
> ENOSYS  -> "Function not implemented" / "Function not supported"
> EINVAL  -> "Invalid argument"
>
> In this case, the problem is that the avctx argument has several
> streams: EINVAL is really the correct one.

i can use whatever you prefer
Maksym Veremeyenko July 23, 2017, 8:16 p.m. UTC | #11
On 23.07.2017 14:53, Marton Balint wrote:
[...]
>> @@ -3012,6 +3014,10 @@ decklink_indev_deps="decklink threads"
>>  decklink_indev_extralibs="-lstdc++"
>>  decklink_outdev_deps="decklink threads"
>>  decklink_outdev_extralibs="-lstdc++"
>> +ndi_indev_deps="ndi pthreads"
>
> you only need threads not pthreads.
>
ok

>> +ndi_indev_extralibs="-lndi"
>> +ndi_outdev_deps="ndi pthreads"
>
> as far as I see you don't need any threading here.
>
yes

>> +@item find_sources
>
> Maybe list_devices would be a better name, it is more consistent with
> other devices.
>
according to SDK documentation terminology, *source* and *find* used for 
this, there are no actual *devices* but only network sources.

>> +@item allow_video_fields
>> +When this flag is @option{false}, all video that you receive will be
>> progressive.
>> +Defaults to @option{1}.
>
> Defaults to @option{true}.
>
yes


>> +typedef struct AVPacketQueue {
>> +    AVPacketList *first_pkt, *last_pkt;
>> +    int nb_packets;
>> +    unsigned long long size;
>> +    int abort_request;
>> +    pthread_mutex_t mutex;
>> +    pthread_cond_t cond;
>> +    AVFormatContext *avctx;
>> +} AVPacketQueue;
>
> I guess the AVPacketQueue is duplicated from decklink_dec. So you either
exactly

> factorize that into a seprarate file, or you may also consider not using a
> queue here, and get rid of it, because according to the SDK, NDI lib
> also has a
> small (how small?) internal queue, so a few frames of buffering is
> provided by
> the NDI lib itself, therefore doing it on your own might not be needed
> at all.
> If you do this, you can drop the threads dependancy here as well.
>
it has internal queue, but the main goal of using AVPacketQueue is to 
avoid losing packets during *probe* operation, if we discard this 
(ignore that packets or i find how deal in a case of AVFMTCTX_NOHEADER) 
then all decklink's AVPacketQueue code could be dropped



>> +    av_log(avctx, AV_LOG_INFO, "Found %d NDI sources:\n", n);
>> +    for (i = 0; i < n; i++)
>> +    {
>> +        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n",
>> ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
>> +        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
>> +            *source_to_connect_to = ndi_srcs[i];
>> +            j = i;
>> +        }
>> +    }
>
> You might only want to list all sources, if the user wanted
> list_sources. If
> the user chose a specific source, I don't think there is a good reason
> to list
> all available sources.
>
we have to find all sources to operate with a user's submitted value, 
because creating receiver operates with a struct returned by 
find_sources function rather than a name.

i can suppress printing found sources until find_sources specified


>> +    /* Find available sources. */
>> +    ret = ndi_find_sources(avctx, avctx->filename,
>> &recv_create_desc.source_to_connect_to);
>> +    if (ctx->find_sources) {
>> +        return AVERROR_EXIT;
>> +    }
>> +    if (ret < 0) {
>> +        return AVERROR(ENOENT);
>
> if (ret < 0)
>     return ret;
>
yes
Nicolas George July 24, 2017, 1:30 p.m. UTC | #12
Le quintidi 5 thermidor, an CCXXV, Maksym Veremeyenko a écrit :
> according to SDK documentation terminology, *source* and *find* used for
> this, there are no actual *devices* but only network sources.

This is a case where we have two opposite requirements: consistency
within FFmpeg or consistency with other NDI applications. In this
particular case, I think consistency within FFmpeg is more important.
But also remember that options can have several names, that would
eliminate the problem.

> it has internal queue, but the main goal of using AVPacketQueue is to avoid
> loosing packets during *probe* operation, if we discard this (ignore that
> packets or i find how deal in a case of AVFMTCTX_NOHEADER) then all
> decklink's AVPacketQueue code could be dropped

You have nothing special to do if using AVFMTCTX_NOHEADER: create no
stream in read_header(), in read_packet() create streams as needed (the
first time a video packet arrives, create the video stream, idem for
audio), and libavformat takes care of everything else: probing the
packets until properties are known, buffering the packets, etc.

If this API can be read without an explicit thread in the application,
then by all means get rid of all the threading code. Running components
in separate threads is better done with generic code.

> i can suppress printing found sources until find_sources specified

I think that is exactly what Marton meant.

Regards,
Nicolas George July 24, 2017, 5:11 p.m. UTC | #13
Le quintidi 5 thermidor, an CCXXV, Maksym Veremeyenko a écrit :
> would it be ok:
> 
>    Subject: [PATCH] lavf: implement NewTek NDI support

Exactly. Except I mistyped: that would be lavd, not lavf; my bad.

> would you *newtek_ndi* or *libndi_newtek"

I prefer libndi_newtek, but that is only my advice.

> >Is -vcodec wrapped_avframe really necessary? It should be automatic.
> yes, i can remove that if you prefer.

I think it is better if examples are as simple as possible.

> as mentioned above, it used in a thread

Yes, but you can give avctx to the thread and get the private context
from it instead of giving the private context and getting avctx from it.
I have not yet looked at what you did in the new version.

> i still have some doubts about stream time_base used, i think it should be
> 1/10000000 like their timecode value, rather then framerate, because if
> source will provide variable frame rate it would be hard to compute real
> frame duration

If the protocol supports VFR, then yes, keeping the timestamps is
probably the best choice. Actually, if they decided to provide these
timestamps, there is no reason to rescale them here, be it for video or
for audio.

> i can use __func__ but if you prefer, i will drop this.

My personal preference would be a message that can easily be found with
"git grep". But if you intend to maintain this code on the long run, the
decision is yours.

> >>+        if (NDIlib_frame_type_video == t)
> >The test looks strange in that direction.
> why? that approach could save from mistypes like

I know it is the usual justification for this idiom. But at the same
time, it is anti-idiomatic for most people. We say "Alice is in her
office", not "Alice's office contains her", and the same goes for
comparisons. Furthermore, all decent compilers know to print a warning
in these cases since quite some time.

As above, if you intent to maintain this code on the long run, then
decision is yours. Otherwise, better follow the usual coding style.

> >>+            ndi_enqeue_video(ctx, &v);
> >>+        else if (NDIlib_frame_type_audio == t)
> >>+            ndi_enqeue_audio(ctx, &a);
> >>+        else if (NDIlib_frame_type_metadata == t)
> >>+            NDIlib_recv_free_metadata(ctx->recv, &m);
> >Is there a risk of leak if a new type of packet is invented? Looks like
> >a bad API design by NewTek.
> API in a developing stage and not final - we can expect everythign

You seem to be in contact with them, then you may report this issue: a
function capable of freeing any kind of packet is absolutely necessary.
Otherwise, if a new kind of packet is introduced, older applications
will not be able to free them and leak. With a generic function,
applications can just free any unknown packet.

> could you give me a hint/examples?

I think it has been addressed in other mails.

> currently alpha channel is ignored

Can you clarify who is ignoring the alpha channel?

> i blindly copying further code from decklink module that provides setting
> interlaced flag

It is entirely possible that existing components have code that follow
what is no longer considered best practice, especially when these
components depend on proprietary libraries that few people have.

On the other hand, new code is expected to follow the best practices. In
particular, new code is expected to produce zero warnings with common
compilers (gcc, clang).

> thread is reading, main app is writing only...

This is a common misconception. The fact that the modification are
unidirectional avoids race conditions, but they are not the only issue.
The compiler is perfectly allowed to decide that this store is useless
and optimize it away. Even if the compiler is prevented to do so using
the "volatile" keyword, the processor can keep the value inside a cache
that is not shared between SMP instances.

For these reasons, a lock is not needed but a memory barrier is. A
memory barrier is exactly the instruction to flush the data completely
to/from shared memory.

>						 but if you give me a example
> from ffmpeg's code or more appropriate approach for notifying thread to
> exit, i will reimplement it

The simplest way of getting a memory barrier is a mutex, of course, even
though it is a bit overkill. Modern C has features in stdatomic.h that
can probably be of use, but I do not know how to use them. Also,
AVThreadMessageQueue has the feature of sending a single integer code
from reader to writer.

And of course, if you get rid of the thread, all this is moot.

> >>+    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
> >>+        ? -avframe->linesize[0]
> >>+        : avframe->linesize[0];
> >abs()?
> copied from decklink code

Could still be simplified in new code.

> >>+    ctx->video->p_data = (avframe->linesize[0] < 0)
> >>+        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
> >>+        : (void *)(avframe->data[0]);
> >Did you test with negative linesize? It looks like it will flip the
> >video.
> i did not test, but i leaved it for further processing

I find this block quite suspicious. It really needs to be tested. But I
do not know how to easily generate frames with negative strides.

> >Check channel layout too.
> i will drop this check at all

Unfortunately, the documentation does not seem to be available without
registration. Can you check what they tell exactly?

If the library rejects channels configurations it does not support, then
there is no need to check before.

But if the library only checks the number of channels and makes
assumptions about the layout (very common behaviour), then the calling
code must check the layout. Otherwise, the auto-rematrixing code will
not be triggered and the output will be wrong.

Actually, did you check that they use the same channel order as FFmpeg ?
Debugging problems with channel order is a real nightmare for users (I
have spent hours myself on it when I purchased 5.1 speakers), so
developers ought to be very careful about it.

Regards,
diff mbox

Patch

From 3e87f4f643ca589fd511da495b2e0c90d38e87a8 Mon Sep 17 00:00:00 2001
From: Maksym Veremeyenko <verem@m1.tv>
Date: Sun, 23 Jul 2017 03:34:13 -0400
Subject: [PATCH] Implement NewTek NDI support

---
 configure                |   9 +
 doc/indevs.texi          |  54 +++++
 doc/outdevs.texi         |  36 ++++
 libavdevice/Makefile     |   2 +
 libavdevice/alldevices.c |   1 +
 libavdevice/ndi_dec.c    | 499 +++++++++++++++++++++++++++++++++++++++++++++++
 libavdevice/ndi_enc.c    | 289 +++++++++++++++++++++++++++
 7 files changed, 890 insertions(+)
 create mode 100644 libavdevice/ndi_dec.c
 create mode 100644 libavdevice/ndi_enc.c

diff --git a/configure b/configure
index 9b5789a..6cf283e 100755
--- a/configure
+++ b/configure
@@ -279,6 +279,7 @@  External library support:
   --enable-libzvbi         enable teletext support via libzvbi [no]
   --disable-lzma           disable lzma [autodetect]
   --enable-decklink        enable Blackmagic DeckLink I/O support [no]
+  --enable-ndi             enable NewTek NDI I/O support [no]
   --enable-mediacodec      enable Android MediaCodec support [no]
   --enable-netcdf          enable NetCDF, needed for sofalizer filter [no]
   --enable-openal          enable OpenAL 1.1 capture support [no]
@@ -1514,6 +1515,7 @@  EXTERNAL_LIBRARY_GPL_LIST="
 
 EXTERNAL_LIBRARY_NONFREE_LIST="
     decklink
+    ndi
     libfdk_aac
     openssl
 "
@@ -2994,6 +2996,10 @@  decklink_indev_deps="decklink pthreads"
 decklink_indev_extralibs="-lstdc++"
 decklink_outdev_deps="decklink pthreads"
 decklink_outdev_extralibs="-lstdc++"
+ndi_indev_deps="ndi pthreads"
+ndi_indev_extralibs="-lndi"
+ndi_outdev_deps="ndi pthreads"
+ndi_outdev_extralibs="-lndi"
 dshow_indev_deps="IBaseFilter"
 dshow_indev_extralibs="-lpsapi -lole32 -lstrmiids -luuid -loleaut32 -lshlwapi"
 dv1394_indev_deps="dv1394"
@@ -5507,6 +5513,8 @@  avisynth_demuxer_extralibs='$ldl'
 cuda_extralibs='$ldl'
 decklink_outdev_extralibs="$decklink_outdev_extralibs $ldl"
 decklink_indev_extralibs="$decklink_indev_extralibs $ldl"
+ndi_outdev_extralibs="$ndi_outdev_extralibs $ldl"
+ndi_indev_extralibs="$ndi_indev_extralibs $ldl"
 frei0r_filter_extralibs='$ldl'
 frei0r_src_filter_extralibs='$ldl'
 ladspa_filter_extralibs='$ldl'
@@ -5764,6 +5772,7 @@  enabled coreimage_filter  && { check_header_objcc QuartzCore/CoreImage.h || disa
 enabled coreimagesrc_filter && { check_header_objcc QuartzCore/CoreImage.h || disable coreimagesrc_filter; }
 enabled decklink          && { { check_header DeckLinkAPI.h || die "ERROR: DeckLinkAPI.h header not found"; } &&
                                { check_cpp_condition DeckLinkAPIVersion.h "BLACKMAGIC_DECKLINK_API_VERSION >= 0x0a060100" || die "ERROR: Decklink API version must be >= 10.6.1."; } }
+enabled ndi               && { check_header Processing.NDI.Lib.h || die "ERROR: Processing.NDI.Lib.h header not found"; }
 enabled frei0r            && { check_header frei0r.h || die "ERROR: frei0r.h header not found"; }
 enabled gmp               && require gmp gmp.h mpz_export -lgmp
 enabled gnutls            && require_pkg_config gnutls gnutls/gnutls.h gnutls_global_init
diff --git a/doc/indevs.texi b/doc/indevs.texi
index 51c304f..72baabd 100644
--- a/doc/indevs.texi
+++ b/doc/indevs.texi
@@ -321,6 +321,60 @@  ffmpeg -channels 16 -format_code Hi50 -f decklink -i 'UltraStudio Mini Recorder'
 
 @end itemize
 
+@section ndi
+
+The ndi input device provides capture capabilities for using NDI (Network
+Device Interface, standard created by NewTek).
+
+To enable this input device, you need the NDI SDK and you
+need to configure with the appropriate @code{--extra-cflags}
+and @code{--extra-ldflags}.
+
+@subsection Options
+
+@table @option
+
+@item find_sources
+If set to @option{true}, print a list of found/available NDI sources and exit.
+Defaults to @option{false}.
+
+@item wait_sources
+Override time to wait until the number of online sources have changed (ms).
+Defaults to @option{500}.
+
+@item allow_video_fields
+When this flag is @option{false}, all video that you receive will be progressive.
+Defaults to @option{true}.
+
+@item max_frames_probe
+Maximum frames for probe input format.
+Defaults to @option{25}.
+
+@item reference_level
+The audio reference level in dB. This specifies how many dB above the
+reference level (+4dBU) is the full range of 16 bit audio.
+Defaults to @option{0}.
+
+@end table
+
+@subsection Examples
+
+@itemize
+
+@item
+List input devices:
+@example
+ffmpeg -f ndi -find_sources 1 -i foo
+@end example
+
+@item
+Restream to NDI:
+@example
+ffmpeg -f ndi -i "DEV-5.INTERNAL.M1STEREO.TV (NDI_SOURCE_NAME_1)" -acodec copy -vcodec wrapped_avframe -f ndi -y NDI_SOURCE_NAME_2
+@end example
+
+@end itemize
+
 @section dshow
 
 Windows DirectShow input device.
diff --git a/doc/outdevs.texi b/doc/outdevs.texi
index df41cc8..7830cc7 100644
--- a/doc/outdevs.texi
+++ b/doc/outdevs.texi
@@ -182,6 +182,42 @@  ffmpeg -i test.avi -f decklink -pix_fmt uyvy422 -s 720x486 -r 24000/1001 'DeckLi
 
 @end itemize
 
+@section ndi
+
+The ndi output device provides playback capabilities for using NDI (Network
+Device Interface, standard created by NewTek).
+
+To enable this output device, you need the NDI SDK and you
+need to configure with the appropriate @code{--extra-cflags}
+and @code{--extra-ldflags}.
+
+NDI is very picky about the formats it supports. Pixel format is always
+uyvy422, framerate, field order and video size must be determined for your
+device with @command{-list_formats 1}. Audio sample rate is always 48 kHz.
+
+@subsection Options
+
+@table @option
+
+@item reference_level
+The audio reference level in dB. This specifies how many dB above the
+reference level (+4dBU) is the full range of 16 bit audio.
+Defaults to @option{0}.
+
+@end table
+
+@subsection Examples
+
+@itemize
+
+@item
+Play video clip:
+@example
+ffmpeg -i "udp://@239.1.1.1:10480?fifo_size=1000000&overrun_nonfatal=1" -vf "scale=720:576,fps=fps=25,setdar=dar=16/9,format=pix_fmts=uyvy422" -vcodec wrapped_avframe -f ndi NEW_NDI1
+@end example
+
+@end itemize
+
 @section fbdev
 
 Linux framebuffer output device.
diff --git a/libavdevice/Makefile b/libavdevice/Makefile
index 78c42e6..dc26bc5 100644
--- a/libavdevice/Makefile
+++ b/libavdevice/Makefile
@@ -19,6 +19,8 @@  OBJS-$(CONFIG_BKTR_INDEV)                += bktr.o
 OBJS-$(CONFIG_CACA_OUTDEV)               += caca.o
 OBJS-$(CONFIG_DECKLINK_OUTDEV)           += decklink_enc.o decklink_enc_c.o decklink_common.o
 OBJS-$(CONFIG_DECKLINK_INDEV)            += decklink_dec.o decklink_dec_c.o decklink_common.o
+OBJS-$(CONFIG_NDI_OUTDEV)                += ndi_enc.o
+OBJS-$(CONFIG_NDI_INDEV)                 += ndi_dec.o
 OBJS-$(CONFIG_DSHOW_INDEV)               += dshow_crossbar.o dshow.o dshow_enummediatypes.o \
                                             dshow_enumpins.o dshow_filter.o \
                                             dshow_pin.o dshow_common.o
diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c
index 280a260..d968776 100644
--- a/libavdevice/alldevices.c
+++ b/libavdevice/alldevices.c
@@ -46,6 +46,7 @@  static void register_all(void)
     REGISTER_INDEV   (BKTR,             bktr);
     REGISTER_OUTDEV  (CACA,             caca);
     REGISTER_INOUTDEV(DECKLINK,         decklink);
+    REGISTER_INOUTDEV(NDI,              ndi);
     REGISTER_INDEV   (DSHOW,            dshow);
     REGISTER_INDEV   (DV1394,           dv1394);
     REGISTER_INOUTDEV(FBDEV,            fbdev);
diff --git a/libavdevice/ndi_dec.c b/libavdevice/ndi_dec.c
new file mode 100644
index 0000000..7454ca8
--- /dev/null
+++ b/libavdevice/ndi_dec.c
@@ -0,0 +1,499 @@ 
+/*
+ * NDI input
+ * Copyright (c) 2017 Maksym Veremeyenko
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavformat/avformat.h"
+#include "libavformat/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/imgutils.h"
+
+#include <pthread.h>
+//#include <semaphore.h>
+
+#include <Processing.NDI.Lib.h>
+
+typedef struct AVPacketQueue {
+    AVPacketList *first_pkt, *last_pkt;
+    int nb_packets;
+    unsigned long long size;
+    int abort_request;
+    pthread_mutex_t mutex;
+    pthread_cond_t cond;
+    AVFormatContext *avctx;
+} AVPacketQueue;
+
+static void avpacket_queue_init(AVFormatContext *avctx, AVPacketQueue *q)
+{
+    /* Zero the whole queue state, then create the synchronization
+     * primitives shared between the capture thread and the demuxer. */
+    memset(q, 0, sizeof(*q));
+    pthread_mutex_init(&q->mutex, NULL);
+    pthread_cond_init(&q->cond, NULL);
+    q->avctx = avctx;
+}
+
+static void avpacket_queue_flush(AVPacketQueue *q)
+{
+    /* Drop every queued packet and reset the counters. Takes the queue
+     * mutex itself, so callers must not hold it. */
+    AVPacketList *cur, *next;
+
+    pthread_mutex_lock(&q->mutex);
+    cur = q->first_pkt;
+    while (cur) {
+        next = cur->next;
+        av_packet_unref(&cur->pkt);
+        av_freep(&cur);
+        cur = next;
+    }
+    q->first_pkt  = NULL;
+    q->last_pkt   = NULL;
+    q->nb_packets = 0;
+    q->size       = 0;
+    pthread_mutex_unlock(&q->mutex);
+}
+
+/* Tear down the queue: flush any remaining packets, then destroy the
+ * synchronization primitives created by avpacket_queue_init(). */
+static void avpacket_queue_end(AVPacketQueue *q)
+{
+    avpacket_queue_flush(q);
+    pthread_mutex_destroy(&q->mutex);
+    pthread_cond_destroy(&q->cond);
+}
+
+static unsigned long long avpacket_queue_size(AVPacketQueue *q)
+{
+    /* Thread-safe read of the number of bytes currently buffered. */
+    unsigned long long bytes;
+
+    pthread_mutex_lock(&q->mutex);
+    bytes = q->size;
+    pthread_mutex_unlock(&q->mutex);
+
+    return bytes;
+}
+
+/* Append a copy of *pkt to the queue.
+ * Returns 0 on success, a negative AVERROR code on failure (the packet is
+ * dropped on failure; the caller keeps ownership of its own pkt). */
+static int avpacket_queue_put(AVPacketQueue *q, AVPacket *pkt)
+{
+    AVPacketList *pkt1;
+    int ret;
+
+    /* Drop the packet if more than 1 GiB is already buffered, so a stalled
+     * consumer cannot exhaust memory. */
+    if (avpacket_queue_size(q) > 1024 * 1024 * 1024) {
+        av_log(q->avctx, AV_LOG_WARNING, "%s: input buffer overrun!\n", __FUNCTION__);
+        return AVERROR(ENOMEM);
+    }
+
+    pkt1 = av_malloc(sizeof(*pkt1));
+    if (!pkt1)
+        return AVERROR(ENOMEM);
+
+    /* Take a self-contained reference; this copies the payload when the
+     * source packet is not reference-counted. Replaces the deprecated
+     * av_dup_packet(). */
+    ret = av_packet_ref(&pkt1->pkt, pkt);
+    if (ret < 0) {
+        av_freep(&pkt1);
+        return ret;
+    }
+    pkt1->next = NULL;
+
+    pthread_mutex_lock(&q->mutex);
+
+    if (!q->last_pkt)
+        q->first_pkt = pkt1;
+    else
+        q->last_pkt->next = pkt1;
+
+    q->last_pkt = pkt1;
+    q->nb_packets++;
+    q->size += pkt1->pkt.size + sizeof(*pkt1);
+
+    pthread_cond_signal(&q->cond);
+    pthread_mutex_unlock(&q->mutex);
+
+    return 0;
+}
+
+static int avpacket_queue_get(AVPacketQueue *q, AVPacket *pkt, int block)
+{
+    /* Pop the oldest packet into *pkt. Returns 1 on success, 0 when the
+     * queue is empty and 'block' is false; otherwise waits on the
+     * condition variable until a packet arrives. */
+    AVPacketList *head;
+    int ret;
+
+    pthread_mutex_lock(&q->mutex);
+    while (1) {
+        head = q->first_pkt;
+        if (head) {
+            q->first_pkt = head->next;
+            if (!q->first_pkt)
+                q->last_pkt = NULL;
+            q->nb_packets--;
+            q->size -= head->pkt.size + sizeof(*head);
+            *pkt = head->pkt;
+            av_free(head);
+            ret = 1;
+            break;
+        }
+        if (!block) {
+            ret = 0;
+            break;
+        }
+        pthread_cond_wait(&q->cond, &q->mutex);
+    }
+    pthread_mutex_unlock(&q->mutex);
+
+    return ret;
+}
+
+/* Private context of the NDI input device. */
+struct ndi_ctx {
+    const AVClass *cclass;
+
+    void *ctx;
+
+    /* Options */
+    int find_sources;       /* when set, list available NDI sources and exit */
+    int wait_sources;       /* time to wait for source enumeration, in ms */
+    int allow_video_fields; /* when false, only progressive video is received */
+    int max_frames_probe;   /* max frames to read while probing the input format */
+    int reference_level;    /* audio reference level in dB (see docs) */
+
+    /* Runtime */
+    NDIlib_recv_create_t* recv; /* NDI receiver instance from NDIlib_recv_create2() */
+    pthread_t reader;           /* capture thread running ndi_reader() */
+    int f_started, f_stop, dropped; /* thread started/stop flags; count of packets
+                                     * dropped on queue overrun */
+
+    /* Streams */
+    AVStream *video_st, *audio_st;
+    AVPacketQueue queue;    /* packets buffered between capture thread and demuxer */
+    AVFormatContext *avctx;
+    int interlaced;
+};
+
+/* Wrap a captured NDI video frame into an AVPacket and queue it.
+ * avpacket_queue_put() stores its own copy of the payload, so the
+ * library-owned frame is released before returning. */
+static int ndi_enqeue_video(struct ndi_ctx *ctx, NDIlib_video_frame_t *v)
+{
+    AVPacket pkt;
+    av_init_packet(&pkt);
+
+    /* NDI timecodes are in 100 ns units (1/10000000). */
+    pkt.dts = pkt.pts = av_rescale_q(v->timecode, (AVRational){1, 10000000LL}, ctx->video_st->time_base);
+    pkt.duration = av_rescale_q(1, (AVRational){v->frame_rate_D, v->frame_rate_N}, ctx->video_st->time_base);
+
+    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
+        __FUNCTION__, pkt.dts, pkt.duration, v->timecode);
+
+    pkt.flags       |= AV_PKT_FLAG_KEY;
+    pkt.stream_index = ctx->video_st->index;
+    pkt.data         = (uint8_t *)v->p_data;
+    pkt.size         = v->yres * v->line_stride_in_bytes;
+
+    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
+        ++ctx->dropped;
+    }
+
+    /* The queue holds its own copy now; free the library-owned frame,
+     * mirroring the audio path, so captured frames do not leak. */
+    NDIlib_recv_free_video(ctx->recv, v);
+
+    return 0;
+}
+
+/* Convert a captured NDI audio frame to interleaved s16 and queue it
+ * as an AVPacket. Returns 0 on success, negative AVERROR on failure. */
+static int ndi_enqeue_audio(struct ndi_ctx *ctx, NDIlib_audio_frame_t *a)
+{
+    AVPacket pkt;
+    NDIlib_audio_frame_interleaved_16s_t dst;
+
+    av_init_packet(&pkt);
+
+    /* NDI timecodes are in 100 ns units (1/10000000). */
+    pkt.dts = pkt.pts = av_rescale_q(a->timecode, (AVRational){1, 10000000LL}, ctx->audio_st->time_base);
+    pkt.duration = av_rescale_q(1, (AVRational){a->no_samples, a->sample_rate}, ctx->audio_st->time_base);
+
+    av_log(ctx->avctx, AV_LOG_DEBUG, "%s: pkt.dts = pkt.pts = %"PRId64", duration=%"PRId64", timecode=%"PRId64"\n",
+        __FUNCTION__, pkt.dts, pkt.duration, a->timecode);
+
+    /* 16-bit interleaved output: 2 bytes per sample per channel. */
+    pkt.size         = 2 * a->no_samples * a->no_channels;
+    pkt.data         = av_mallocz(pkt.size);
+    if (!pkt.data) {
+        NDIlib_recv_free_audio(ctx->recv, a);
+        return AVERROR(ENOMEM);
+    }
+
+    pkt.flags       |= AV_PKT_FLAG_KEY;
+    pkt.stream_index = ctx->audio_st->index;
+
+    /* Honour the user-supplied reference_level option instead of a
+     * hard-coded 0. */
+    dst.reference_level = ctx->reference_level;
+    dst.p_data = (short *)pkt.data;
+    NDIlib_util_audio_to_interleaved_16s(a, &dst);
+
+    if (avpacket_queue_put(&ctx->queue, &pkt) < 0) {
+        ++ctx->dropped;
+    }
+
+    NDIlib_recv_free_audio(ctx->recv, a);
+
+    return 0;
+}
+
+/* Capture thread: pulls video/audio/metadata from the NDI receiver and
+ * queues it until the demuxer raises f_stop. */
+static void *ndi_reader(void *p)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)p;
+
+    /* NOTE(review): f_stop is read here without an atomic or memory
+     * barrier; should be converted to stdatomic or a message queue —
+     * TODO confirm with maintainers. */
+    while (!ctx->f_stop) {
+        NDIlib_video_frame_t v;
+        NDIlib_audio_frame_t a;
+        NDIlib_metadata_frame_t m;
+        NDIlib_frame_type_e t;
+
+        /* The 40 ms capture timeout keeps the loop responsive to f_stop. */
+        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
+
+        if (t == NDIlib_frame_type_video)
+            ndi_enqeue_video(ctx, &v);
+        else if (t == NDIlib_frame_type_audio)
+            ndi_enqeue_audio(ctx, &a);
+        else if (t == NDIlib_frame_type_metadata)
+            NDIlib_recv_free_metadata(ctx->recv, &m);
+    }
+
+    return NULL;
+}
+
+/* Enumerate NDI sources on the network, log them, and copy the source
+ * whose name matches 'name' into *source_to_connect_to.
+ * Returns the (non-negative) index of the match, -1 when no source
+ * matched, or a negative AVERROR code on library failure. */
+static int ndi_find_sources(AVFormatContext *avctx, const char *name, NDIlib_source_t *source_to_connect_to)
+{
+    /* 'found' must be a signed int: the original 'unsigned int j = -1'
+     * relied on an implementation-defined unsigned->signed conversion
+     * at the return statement. */
+    int found = -1;
+    unsigned int n, i;
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    const NDIlib_source_t *ndi_srcs = NULL;
+    const NDIlib_find_create_t find_create_desc = { true, NULL };
+    NDIlib_find_instance_t ndi_find;
+
+    ndi_find = NDIlib_find_create2(&find_create_desc);
+    if (!ndi_find) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_find_create failed.\n");
+        return AVERROR(EIO);
+    }
+
+    NDIlib_find_wait_for_sources(ndi_find, ctx->wait_sources);
+    ndi_srcs = NDIlib_find_get_current_sources(ndi_find, &n);
+
+    av_log(avctx, AV_LOG_INFO, "Found %u NDI sources:\n", n);
+    for (i = 0; i < n; i++) {
+        av_log(avctx, AV_LOG_INFO, "\t'%s'\t'%s'\n", ndi_srcs[i].p_ndi_name, ndi_srcs[i].p_ip_address);
+        if (!strcmp(name, ndi_srcs[i].p_ndi_name)) {
+            /* Last match wins, matching the original behaviour. */
+            *source_to_connect_to = ndi_srcs[i];
+            found = (int)i;
+        }
+    }
+
+    NDIlib_find_destroy(ndi_find);
+
+    return found;
+}
+
+/* Demuxer read_header: locate the requested source, create a receiver,
+ * probe a few frames to discover the video/audio streams, then start
+ * the background reader thread.
+ * Fixes: check pthread_create(); fail when probing found no stream at
+ * all (previously returned success with zero streams); release the
+ * receiver and packet queue on the error path, since read_close is not
+ * invoked when read_header fails. */
+static int ndi_read_header(AVFormatContext *avctx)
+{
+    int ret, i;
+    AVStream *st;
+    NDIlib_recv_create_t recv_create_desc;
+    const NDIlib_tally_t tally_state = { true, false };
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    ctx->avctx = avctx;
+
+    if (!NDIlib_initialize()) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
+        return AVERROR(EIO);
+    }
+
+    /* Find available sources. */
+    ret = ndi_find_sources(avctx, avctx->filename, &recv_create_desc.source_to_connect_to);
+    if (ctx->find_sources) {
+        /* The user only asked for the source listing printed above. */
+        return AVERROR_EXIT;
+    }
+    if (ret < 0) {
+        return AVERROR(ENOENT);
+    }
+
+    /* Create receiver description */
+    recv_create_desc.color_format = NDIlib_recv_color_format_e_UYVY_RGBA;
+    recv_create_desc.bandwidth = NDIlib_recv_bandwidth_highest;
+    recv_create_desc.allow_video_fields = ctx->allow_video_fields;
+
+    /* Create the receiver */
+    ctx->recv = NDIlib_recv_create2(&recv_create_desc);
+    if (!ctx->recv) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_recv_create2 failed.\n");
+        return AVERROR(EIO);
+    }
+
+    /* Set tally */
+    NDIlib_recv_set_tally(ctx->recv, &tally_state);
+
+    avpacket_queue_init(avctx, &ctx->queue);
+
+    /* Probe the input: capture up to max_frames_probe frames to learn
+     * which streams the source provides and how they are formatted. */
+    for (ret = 0, i = 0; i < ctx->max_frames_probe; i++) {
+        NDIlib_video_frame_t v;
+        NDIlib_audio_frame_t a;
+        NDIlib_metadata_frame_t m;
+        NDIlib_frame_type_e t;
+
+        t = NDIlib_recv_capture(ctx->recv, &v, &a, &m, 40);
+
+        av_log(avctx, AV_LOG_DEBUG, "NDIlib_recv_capture[%d]=%d.\n", i, t);
+
+        if (NDIlib_frame_type_video == t) {
+            if (!ctx->video_st) {
+                st = avformat_new_stream(avctx, NULL);
+                if (!st) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
+                    ret = AVERROR(ENOMEM);
+                    goto error;
+                }
+
+                st->time_base.den               = v.frame_rate_N;
+                st->time_base.num               = v.frame_rate_D;
+                av_stream_set_r_frame_rate(st, av_make_q(st->time_base.den, st->time_base.num));
+                st->codecpar->codec_type        = AVMEDIA_TYPE_VIDEO;
+                st->codecpar->width             = v.xres;
+                st->codecpar->height            = v.yres;
+                st->codecpar->codec_id          = AV_CODEC_ID_RAWVIDEO;
+                /* Raw 16 bpp (UYVY) estimate of the stream bitrate. */
+                st->codecpar->bit_rate          = av_rescale(v.xres * v.yres * 16, st->time_base.den, st->time_base.num);
+
+                if (NDIlib_FourCC_type_UYVY == v.FourCC || NDIlib_FourCC_type_UYVA == v.FourCC) {
+                    /* UYVA's alpha plane is dropped; exported as UYVY. */
+                    st->codecpar->format        = AV_PIX_FMT_UYVY422;
+                    st->codecpar->codec_tag     = MKTAG('U', 'Y', 'V', 'Y');
+                } else if (NDIlib_FourCC_type_BGRA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_BGRA;
+                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', 'A');
+                } else if (NDIlib_FourCC_type_BGRX == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_BGR0;
+                    st->codecpar->codec_tag     = MKTAG('B', 'G', 'R', '0');
+                } else if (NDIlib_FourCC_type_RGBA == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_RGBA;
+                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', 'A');
+                } else if (NDIlib_FourCC_type_RGBX == v.FourCC) {
+                    st->codecpar->format        = AV_PIX_FMT_RGB0;
+                    st->codecpar->codec_tag     = MKTAG('R', 'G', 'B', '0');
+                } else {
+                    av_log(avctx, AV_LOG_ERROR, "Unsupported video stream format, v.FourCC=%d\n", v.FourCC);
+                    ret = AVERROR(ENOTSUP);
+                    goto error;
+                }
+
+                ctx->interlaced = v.frame_format_type == NDIlib_frame_format_type_interleaved ? 1 : 0;
+
+                avpriv_set_pts_info(st, 64, v.frame_rate_D, v.frame_rate_N);
+
+                ctx->video_st = st;
+            }
+
+            ndi_enqeue_video(ctx, &v);
+        } else if (NDIlib_frame_type_audio == t) {
+            if (!ctx->audio_st) {
+                st = avformat_new_stream(avctx, NULL);
+                if (!st) {
+                    av_log(avctx, AV_LOG_ERROR, "Cannot add stream\n");
+                    ret = AVERROR(ENOMEM);
+                    goto error;
+                }
+
+                st->codecpar->codec_type        = AVMEDIA_TYPE_AUDIO;
+                st->codecpar->codec_id          = AV_CODEC_ID_PCM_S16LE;
+                st->codecpar->sample_rate       = a.sample_rate;
+                st->codecpar->channels          = a.no_channels;
+
+                avpriv_set_pts_info(st, 64, 1, a.sample_rate);  /* 64 bit pts in sample units */
+
+                ctx->audio_st = st;
+            }
+
+            ndi_enqeue_audio(ctx, &a);
+        } else if (NDIlib_frame_type_metadata == t) {
+            /* Metadata frames are not exported; just release them. */
+            NDIlib_recv_free_metadata(ctx->recv, &m);
+        }
+
+        /* Stop probing once both stream types have been seen. */
+        if (ctx->video_st && ctx->audio_st)
+            break;
+    }
+
+    if (!ctx->video_st && !ctx->audio_st) {
+        av_log(avctx, AV_LOG_ERROR, "No video or audio stream detected while probing.\n");
+        ret = AVERROR(EIO);
+        goto error;
+    }
+
+    ctx->f_started = 1;
+    ret = pthread_create(&ctx->reader, NULL, ndi_reader, ctx);
+    if (ret) {
+        av_log(avctx, AV_LOG_ERROR, "pthread_create failed.\n");
+        ctx->f_started = 0;
+        ret = AVERROR(ret);
+        goto error;
+    }
+
+    return 0;
+
+error:
+    /* read_close is not called when read_header fails: clean up here. */
+    avpacket_queue_end(&ctx->queue);
+    NDIlib_recv_destroy(ctx->recv);
+    ctx->recv = NULL;
+    return ret;
+}
+
+/* Demuxer read_packet: block until the reader thread has queued a
+ * packet, then hand it to lavf.
+ * Fix: the return value of avpacket_queue_get() was ignored; a failed
+ * (e.g. aborted) dequeue now reports an error instead of returning an
+ * uninitialized packet. */
+static int ndi_read_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    AVFrame *frame = ctx->video_st ? ctx->video_st->codec->coded_frame : NULL;
+
+    if (avpacket_queue_get(&ctx->queue, pkt, 1) < 0)
+        return AVERROR(EIO);
+
+    /* NOTE(review): AVStream.codec and coded_frame are deprecated; the
+     * interlacing flags should be exported by other means — TODO. */
+    if (frame && ctx->interlaced) {
+        frame->interlaced_frame = 1;
+        frame->top_field_first = 1;
+    }
+
+    return 0;
+}
+
+/* Demuxer read_close: stop the reader thread, drain the packet queue
+ * and destroy the NDI receiver.
+ * Fix: removed the stray ';' after the if block and reset ctx->recv
+ * after destruction to avoid a dangling pointer. */
+static int ndi_read_close(AVFormatContext *avctx)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    if (ctx->recv) {
+        if (ctx->f_started) {
+            /* Signal the reader thread to stop, then wait for it. */
+            ctx->f_stop = 1;
+            pthread_join(ctx->reader, NULL);
+        }
+
+        ctx->f_started = 0;
+        ctx->f_stop = 0;
+
+        avpacket_queue_end(&ctx->queue);
+        NDIlib_recv_destroy(ctx->recv);
+        ctx->recv = NULL;
+    }
+
+    return 0;
+}
+
+#define OFFSET(x) offsetof(struct ndi_ctx, x)
+#define DEC AV_OPT_FLAG_DECODING_PARAM
+
+/* Demuxer private options.
+ * Fix: find_sources and allow_video_fields are on/off flags and are now
+ * declared AV_OPT_TYPE_BOOL (accepts true/false as well as 0/1). */
+static const AVOption options[] = {
+    { "find_sources", "Find available sources", OFFSET(find_sources), AV_OPT_TYPE_BOOL, { .i64 = 0 }, 0, 1, DEC },
+    { "wait_sources", "Time to wait until the number of online sources have changed (ms)", OFFSET(wait_sources), AV_OPT_TYPE_INT, { .i64 = 500 }, 100, 10000, DEC },
+    { "allow_video_fields", "When this flag is FALSE, all video that you receive will be progressive", OFFSET(allow_video_fields), AV_OPT_TYPE_BOOL, { .i64 = 1 }, 0, 1, DEC },
+    { "max_frames_probe", "Maximum frames for probe", OFFSET(max_frames_probe), AV_OPT_TYPE_INT, { .i64 = 25 }, 6, 100, DEC },
+    { "reference_level", "The audio reference level in dB", OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, DEC | AV_OPT_FLAG_AUDIO_PARAM },
+    { NULL },
+};
+
+static const AVClass ndi_demuxer_class = {
+    .class_name = "NDI demuxer",
+    .item_name  = av_default_item_name,
+    .option     = options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
+};
+
+AVInputFormat ff_ndi_demuxer = {
+    .name           = "ndi",
+    .long_name      = NULL_IF_CONFIG_SMALL("NDI input"),
+    .flags          = AVFMT_NOFILE,
+    .priv_class     = &ndi_demuxer_class,
+    .priv_data_size = sizeof(struct ndi_ctx),
+    .read_header    = ndi_read_header,
+    .read_packet    = ndi_read_packet,
+    .read_close     = ndi_read_close,
+};
diff --git a/libavdevice/ndi_enc.c b/libavdevice/ndi_enc.c
new file mode 100644
index 0000000..8cbdb9e
--- /dev/null
+++ b/libavdevice/ndi_enc.c
@@ -0,0 +1,289 @@ 
+/*
+ * NDI output
+ * Copyright (c) 2017 Maksym Veremeyenko
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+#include "libavformat/avformat.h"
+#include "libavformat/internal.h"
+#include "libavutil/opt.h"
+#include "libavutil/imgutils.h"
+
+#include <Processing.NDI.Lib.h>
+
+/* Muxer private context. */
+struct ndi_ctx {
+    const AVClass *cclass;
+
+    void *ctx;
+
+    /* Options */
+    int reference_level;      /* audio reference level in dB */
+
+    /* Streams present; allocated lazily in ndi_setup_video/audio,
+     * freed in ndi_write_trailer. */
+    NDIlib_video_frame_t* video;
+    NDIlib_audio_frame_interleaved_16s_t* audio;
+
+    /* Status */
+    int64_t last_pts;         /* highest pts seen so far, across streams */
+
+    NDIlib_send_instance_t ndi_send;
+    /* Frame handed to the asynchronous NDI send; must stay alive until
+     * the next send (or a NULL flush) completes. */
+    AVFrame *last_avframe;
+};
+
+/* Muxer write_trailer: flush the pending asynchronous video send,
+ * release the kept frame, destroy the sender and free stream descriptors.
+ * Fix: dropped the redundant 'if (ptr)' guards — av_freep() is NULL-safe. */
+static int ndi_write_trailer(AVFormatContext *avctx)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    if (ctx->ndi_send)
+    {
+        if (ctx->last_avframe) {
+            /* A NULL frame flushes the async send, after which the
+             * previously submitted frame may safely be freed. */
+            NDIlib_send_send_video_async(ctx->ndi_send, NULL);
+            av_frame_free(&ctx->last_avframe);
+        }
+
+        NDIlib_send_destroy(ctx->ndi_send);
+    }
+
+    av_freep(&ctx->video);
+    av_freep(&ctx->audio);
+
+    return 0;
+}
+
+/* Send one wrapped AVFrame to NDI.
+ * The frame is cloned and submitted asynchronously; the clone is kept in
+ * ctx->last_avframe because NDI may still read from it until the next
+ * send completes. Returns 0 on success, AVERROR on bad input. */
+static int ndi_write_video_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVFrame *avframe, *tmp = (AVFrame *)pkt->data;
+
+    /* The frame must match the format negotiated in ndi_setup_video(). */
+    if (tmp->format != AV_PIX_FMT_UYVY422 ||
+        tmp->width  != ctx->video->xres ||
+        tmp->height != ctx->video->yres) {
+        av_log(avctx, AV_LOG_ERROR, "Got a frame with invalid pixel format or dimension.\n");
+        av_log(avctx, AV_LOG_ERROR, "tmp->width=%d, tmp->height=%d, ctx->video->xres=%d, ctx->video->yres=%d\n", tmp->width, tmp->height, ctx->video->xres, ctx->video->yres);
+        return AVERROR(EINVAL);
+    }
+    /* Clone so the data outlives the packet during the async send. */
+    avframe = av_frame_clone(tmp);
+    if (!avframe) {
+        av_log(avctx, AV_LOG_ERROR, "Could not clone video frame.\n");
+        return AVERROR(EIO);
+    }
+
+    /* NDI timecodes are expressed in 100 ns units. */
+    ctx->video->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000LL});
+
+    /* A negative linesize means a bottom-up layout: point p_data at the
+     * last row and pass a positive stride. */
+    ctx->video->line_stride_in_bytes = avframe->linesize[0] < 0
+        ? -avframe->linesize[0]
+        : avframe->linesize[0];
+    ctx->video->p_data = (avframe->linesize[0] < 0)
+        ? (void *)(avframe->data[0] + avframe->linesize[0] * (avframe->height - 1))
+        : (void *)(avframe->data[0]);
+
+    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
+        __FUNCTION__, pkt->pts, ctx->video->timecode, st->time_base.num, st->time_base.den);
+
+    NDIlib_send_send_video_async(ctx->ndi_send, ctx->video);
+
+    /* The previous async send has completed by now (a new one was just
+     * queued), so the previous clone can be released and replaced. */
+    if (ctx->last_avframe)
+        av_frame_free(&ctx->last_avframe);
+    ctx->last_avframe = avframe;
+
+    return 0;
+}
+
+/* Send one packet of interleaved s16 audio to NDI.
+ * Fix: the debug log dereferenced ctx->video->timecode, which is NULL
+ * for audio-only outputs (crash); it now logs ctx->audio->timecode. */
+static int ndi_write_audio_packet(AVFormatContext *avctx, AVStream *st, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+
+    ctx->audio->p_data = (void*)pkt->data;
+    /* NDI timecodes are expressed in 100 ns units. */
+    ctx->audio->timecode = av_rescale_q(pkt->pts, st->time_base, (AVRational){1, 10000000});
+    /* 16-bit interleaved samples: total bytes / (channels * 2). */
+    ctx->audio->no_samples = pkt->size / (ctx->audio->no_channels << 1);
+
+    av_log(avctx, AV_LOG_DEBUG, "%s: pkt->pts=%"PRId64", timecode=%"PRId64", st->time_base=%d/%d\n",
+        __FUNCTION__, pkt->pts, ctx->audio->timecode, st->time_base.num, st->time_base.den);
+
+    NDIlib_util_send_send_audio_interleaved_16s(ctx->ndi_send, ctx->audio);
+
+    return 0;
+}
+
+/* Muxer write_packet: route the packet to the video or audio sender
+ * according to its stream's codec type. */
+static int ndi_write_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVStream *stream = avctx->streams[pkt->stream_index];
+
+    /* Track the most recent pts across all streams. */
+    ctx->last_pts = FFMAX(ctx->last_pts, pkt->pts);
+
+    switch (stream->codecpar->codec_type) {
+    case AVMEDIA_TYPE_VIDEO:
+        return ndi_write_video_packet(avctx, stream, pkt);
+    case AVMEDIA_TYPE_AUDIO:
+        return ndi_write_audio_packet(avctx, stream, pkt);
+    default:
+        return AVERROR(EIO);
+    }
+}
+
+/* Validate the audio stream and allocate the NDI audio frame descriptor.
+ * Fixes: check the av_mallocz() result (previously dereferenced without
+ * a NULL check); return proper AVERROR codes instead of bare -1; correct
+ * the channel-count error message (2, 4 or 8 channels are accepted, not
+ * just "stereo and 7.1"). */
+static int ndi_setup_audio(AVFormatContext *avctx, AVStream *st)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVCodecParameters *c = st->codecpar;
+
+    if (ctx->audio) {
+        av_log(avctx, AV_LOG_ERROR, "Only one audio stream is supported!\n");
+        return AVERROR(EINVAL);
+    }
+    if (c->sample_rate != 48000) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported sample rate!"
+               " Only 48kHz is supported.\n");
+        return AVERROR(EINVAL);
+    }
+    if (c->channels != 2 && c->channels != 4 && c->channels != 8) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported number of channels!"
+               " Only 2, 4 or 8 channels are supported.\n");
+        return AVERROR(EINVAL);
+    }
+
+    ctx->audio = av_mallocz(sizeof(NDIlib_audio_frame_interleaved_16s_t));
+    if (!ctx->audio)
+        return AVERROR(ENOMEM);
+
+    ctx->audio->sample_rate = c->sample_rate;
+    ctx->audio->no_channels = c->channels;
+    ctx->audio->reference_level = ctx->reference_level;
+
+    /* The device expects the sample rate to be fixed. */
+    avpriv_set_pts_info(st, 64, 1, c->sample_rate);
+
+    return 0;
+}
+
+/* Validate the video stream and allocate the NDI video frame descriptor.
+ * Fix: check the av_mallocz() result (previously dereferenced without a
+ * NULL check). */
+static int ndi_setup_video(AVFormatContext *avctx, AVStream *st)
+{
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    AVCodecParameters *c = st->codecpar;
+    AVRational display_aspect_ratio;
+
+    if (ctx->video) {
+        av_log(avctx, AV_LOG_ERROR, "Only one video stream is supported!\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    if (c->codec_id != AV_CODEC_ID_WRAPPED_AVFRAME) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported codec format!"
+               " Only AV_CODEC_ID_WRAPPED_AVFRAME is supported (-vcodec wrapped_avframe).\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    if (c->format != AV_PIX_FMT_UYVY422) {
+        av_log(avctx, AV_LOG_ERROR, "Unsupported pixel format!"
+               " Only AV_PIX_FMT_UYVY422 is supported.\n");
+        return AVERROR(ENOTSUP);
+    }
+
+    ctx->video = av_mallocz(sizeof(NDIlib_video_frame_t));
+    if (!ctx->video)
+        return AVERROR(ENOMEM);
+
+    ctx->video->FourCC = NDIlib_FourCC_type_UYVY;
+    ctx->video->xres = c->width;
+    ctx->video->yres = c->height;
+    /* The stream time base has been set to 1/framerate by this muxer. */
+    ctx->video->frame_rate_N = st->time_base.den;
+    ctx->video->frame_rate_D = st->time_base.num;
+    ctx->video->frame_format_type = AV_FIELD_PROGRESSIVE == c->field_order
+        ? NDIlib_frame_format_type_progressive
+        : NDIlib_frame_format_type_interleaved;
+
+    if (st->sample_aspect_ratio.num &&
+        av_cmp_q(st->sample_aspect_ratio, st->codecpar->sample_aspect_ratio)) {
+        av_reduce(&display_aspect_ratio.num, &display_aspect_ratio.den,
+                  st->codecpar->width  * (int64_t)st->sample_aspect_ratio.num,
+                  st->codecpar->height * (int64_t)st->sample_aspect_ratio.den,
+                  1024 * 1024);
+        ctx->video->picture_aspect_ratio = av_q2d(display_aspect_ratio);
+    }
+    else
+        /* NOTE(review): hard-coded 16:9 fallback; arguably this should
+         * be derived from width/height instead — TODO confirm. */
+        ctx->video->picture_aspect_ratio = 16.0 / 9.0;
+
+    /* The device expects the framerate to be fixed. */
+    avpriv_set_pts_info(st, 64, st->time_base.num, st->time_base.den);
+
+    return 0;
+}
+
+/* Muxer write_header: validate all streams, initialize the NDI library
+ * and create the sender.
+ * Fix: when NDIlib_initialize() or NDIlib_send_create() failed, 'ret'
+ * stayed 0 and the muxer reported success; both paths now return an
+ * error. */
+static int ndi_write_header(AVFormatContext *avctx)
+{
+    int ret = 0;
+    unsigned int n;
+    struct ndi_ctx *ctx = (struct ndi_ctx *)avctx->priv_data;
+    const NDIlib_send_create_t ndi_send_desc = {avctx->filename, NULL, true, false};
+
+    /* check if streams compatible */
+    for (n = 0; n < avctx->nb_streams; n++) {
+        AVStream *st = avctx->streams[n];
+        AVCodecParameters *c = st->codecpar;
+        if        (c->codec_type == AVMEDIA_TYPE_AUDIO) {
+            if ((ret = ndi_setup_audio(avctx, st)))
+                goto error;
+        } else if (c->codec_type == AVMEDIA_TYPE_VIDEO) {
+            if ((ret = ndi_setup_video(avctx, st)))
+                goto error;
+        } else {
+            av_log(avctx, AV_LOG_ERROR, "Unsupported stream type.\n");
+            ret = AVERROR(EIO);
+            goto error;
+        }
+    }
+
+    if (!NDIlib_initialize()) {
+        av_log(avctx, AV_LOG_ERROR, "NDIlib_initialize failed.\n");
+        ret = AVERROR(EIO);
+        goto error;
+    }
+
+    ctx->ndi_send = NDIlib_send_create(&ndi_send_desc);
+    if (!ctx->ndi_send) {
+        av_log(avctx, AV_LOG_ERROR, "Failed to create NDI output %s\n", avctx->filename);
+        ret = AVERROR(EIO);
+    }
+
+error:
+    return ret;
+}
+
+#define OFFSET(x) offsetof(struct ndi_ctx, x)
+/* Muxer private options (AVOptions). */
+static const AVOption options[] = {
+
+    { "reference_level", "The audio reference level in dB"  , OFFSET(reference_level), AV_OPT_TYPE_INT, { .i64 = 0 }, -20, 20, AV_OPT_FLAG_ENCODING_PARAM | AV_OPT_FLAG_AUDIO_PARAM},
+    { NULL },
+};
+
+static const AVClass ndi_muxer_class = {
+    .class_name = "NDI muxer",
+    .item_name  = av_default_item_name,
+    .option     = options,
+    .version    = LIBAVUTIL_VERSION_INT,
+    .category   = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT,
+};
+
+/* Output device definition: raw video must arrive as wrapped AVFrames
+ * and audio as interleaved s16; AVFMT_NOFILE because NDI handles its
+ * own network transport. */
+AVOutputFormat ff_ndi_muxer = {
+    .name           = "ndi",
+    .long_name      = NULL_IF_CONFIG_SMALL("NDI output"),
+    .audio_codec    = AV_CODEC_ID_PCM_S16LE,
+    .video_codec    = AV_CODEC_ID_WRAPPED_AVFRAME,
+    .subtitle_codec = AV_CODEC_ID_NONE,
+    .flags          = AVFMT_NOFILE,
+    .priv_class     = &ndi_muxer_class,
+    .priv_data_size = sizeof(struct ndi_ctx),
+    .write_header   = ndi_write_header,
+    .write_packet   = ndi_write_packet,
+    .write_trailer  = ndi_write_trailer,
+};
-- 
1.8.3.1