From 3a20c35229095f152980ba66c7ec2aa7712f1c4c Mon Sep 17 00:00:00 2001
From: Michael Zucchi <notzed@gmail.com>
Date: Sat, 8 Jun 2019 21:46:13 +0930
Subject: [PATCH] avdevice: Added Kinect for Windows V2 device via
libfreenect2.
---
Changelog | 1 +
configure | 4 +
doc/general.texi | 11 ++
doc/indevs.texi | 93 ++++++++++
libavdevice/Makefile | 1 +
libavdevice/alldevices.c | 1 +
libavdevice/freenect2_dec.cpp | 414 ++++++++++++++++++++++++++++++++++++++++++
libavdevice/freenect2_dec.h | 80 ++++++++
libavdevice/freenect2_dec_c.c | 75 ++++++++
libavdevice/version.h | 2 +-
10 files changed, 681 insertions(+), 1 deletion(-)
create mode 100644 libavdevice/freenect2_dec.cpp
create mode 100644 libavdevice/freenect2_dec.h
create mode 100644 libavdevice/freenect2_dec_c.c
@@ -31,6 +31,7 @@ version <next>:
- xmedian filter
- asr filter
- showspatial multimedia filter
+- libfreenect2 based Kinect for Windows V2 device
version 4.1:
@@ -1744,6 +1744,7 @@ EXTERNAL_LIBRARY_LIST="
$EXTERNAL_LIBRARY_VERSION3_LIST
$EXTERNAL_LIBRARY_GPLV3_LIST
chromaprint
+ freenect2
gcrypt
gnutls
jni
@@ -3309,6 +3310,8 @@ dshow_indev_deps="IBaseFilter"
dshow_indev_extralibs="-lpsapi -lole32 -lstrmiids -luuid -loleaut32 -lshlwapi"
fbdev_indev_deps="linux_fb_h"
fbdev_outdev_deps="linux_fb_h"
+freenect2_indev_deps="freenect2"
+freenect2_indev_extralibs="-lstdc++"
gdigrab_indev_deps="CreateDIBSection"
gdigrab_indev_extralibs="-lgdi32"
gdigrab_indev_select="bmp_decoder"
@@ -6125,6 +6128,7 @@ enabled chromaprint && require chromaprint chromaprint.h chromaprint_get_v
enabled decklink && { require_headers DeckLinkAPI.h &&
{ test_cpp_condition DeckLinkAPIVersion.h "BLACKMAGIC_DECKLINK_API_VERSION >= 0x0a090500" || die "ERROR: Decklink API version must be >= 10.9.5."; } }
enabled frei0r && require_headers "frei0r.h dlfcn.h"
+enabled freenect2 && require_pkg_config freenect2 "freenect2 >= 0.3.0" stdio.h fopen
enabled gmp && require gmp gmp.h mpz_export -lgmp
enabled gnutls && require_pkg_config gnutls gnutls gnutls/gnutls.h gnutls_global_init
enabled jni && { [ $target_os = "android" ] && check_headers jni.h && enabled pthreads || die "ERROR: jni not found"; }
@@ -151,6 +151,16 @@ Go to @url{http://lame.sourceforge.net/} and follow the
instructions for installing the library.
Then pass @code{--enable-libmp3lame} to configure to enable it.
+@section libfreenect2
+
+libfreenect2 is a reverse-engineered library that can read video and
+depth data from Kinect for Windows V2 cameras. FFmpeg can make use of
+the libfreenect2 library to read these video streams.
+
+Go to @url{https://github.com/OpenKinect/libfreenect2} and follow the
+instructions for installing the library. Then pass
+@code{--enable-freenect2} to configure to enable the freenect2 device.
+
@section libilbc
iLBC is a narrowband speech codec that has been made freely available
@@ -1340,6 +1350,7 @@ performance on systems without hardware floating point support).
@item BKTR @tab X @tab
@item caca @tab @tab X
@item DV1394 @tab X @tab
+@item Kinect for Windows V2 @tab X @tab
@item Lavfi virtual device @tab X @tab
@item Linux framebuffer @tab X @tab X
@item JACK @tab X @tab
@@ -657,6 +657,99 @@ Set the frame rate. Default is 25.
@end table
+@section freenect2
+
+Kinect for Windows V2 camera using libfreenect2. This is a
+cross-platform driver and not restricted to Microsoft Windows.
+
+This driver supports colour, infra-red, and depth frames encoded as
+separate streams.
+
+The colour signal is the data directly from the camera which is
+individually encoded jpeg frames. The resolution is 1920x1080 at 30
+frames per second.
+
+Both infra-red and depth are received by the host in an encoded format
+which is decoded by libfreenect2. These are presented as raw 16
+bit-per-pixel greyscale images within their respective streams. The
+depth values are in 0.1mm units. The resolution of each of these
+streams is 512x424 at 30 frames per second.
+
+See also @url{https://github.com/OpenKinect/libfreenect2}.
+
+@subsection Options
+
+The camera to open is supplied by the input filename. It is either a
+single digit which is the index of the camera (as determined by
+libfreenect2) or the serial number. By default the first camera is
+opened.
+
+The @code{-sources} option of ffmpeg can be used to list the serial
+numbers of connected cameras.
+
+@table @option
+
+@item capture
+Selects which streams are captured. This is a bit-field where
+@option{1}=rgb, @option{2}=infra-red, and @option{4}=depth. Symbolic
+constants @option{rgb}, @option{ir}, and @option{depth} can be used
+when only one channel is required. Decoding the infra-red and depth
+streams involves significant additional processing; GPU acceleration
+is usually needed to achieve real-time performance. Default is 7 (all
+streams).
+
+@item pipeline
+Runtime selection for the processing pipeline used to decode the
+infra-red and depth data. The available pipelines depend on the
+libfreenect2 build options. Possible values are @option{auto} for any
+available pipeline, a gpu-accelerated pipeline will be used if
+possible. @option{cpu} for only cpu, @option{opengl} to use OpenGL,
+@option{opencl} to use OpenCL, @option{cuda} for CUDA. Default is
+@option{auto}.
+
+@item device
+Device identifier used for OpenCL or CUDA pipeline. This is passed
+directly to libfreenect2. Default is 0.
+
+@item auto_exposure
+Sets the camera colour sensor auto-exposure value. Default is 0.0.
+
+@item pseudo_exposure
+Sets the camera colour sensor exposure time moderated by power-line
+frequency. This is off by default.
+
+@item manual_exposure, manual_gain
+Set the camera colour sensor to manual exposure and gain. This is off
+by default.
+
+@end table
+
+@subsection Examples
+
+@itemize
+@item
+Capture only the colour stream and write to a motion jpeg file.
+@example
+ffmpeg -f freenect2 -capture rgb -i 0 -c:v copy output.mov
+@end example
+
+@item
+Capture all streams with minimal overhead. Infra-red and depth are
+stored as raw 16-bit frames which has limited container support.
+@example
+ffmpeg -f freenect2 -i 0 -map 0:0 -c:v:0 copy -map 0:1 -pix_fmt:1 gray16be -c:v:1 rawvideo -map 0:2 -pix_fmt:2 gray16be -c:v:2 rawvideo output.mov
+@end example
+
+@item
+Capture all streams with compression. Only limited codecs support the
+16-bit greyscale required, these include jpegls, jpeg2000, ffv1, and
+png.
+@example
+ffmpeg -y -f freenect2 -i 0 -map 0:0 -c:v:0 copy -map 0:1 -c:v:1 jpegls -map 0:2 -c:v:2 jpegls output.mov
+@end example
+
+@end itemize
+
@section gdigrab
Win32 GDI-based screen capture device.
@@ -30,6 +30,7 @@ OBJS-$(CONFIG_FBDEV_OUTDEV) += fbdev_enc.o \
OBJS-$(CONFIG_GDIGRAB_INDEV) += gdigrab.o
OBJS-$(CONFIG_IEC61883_INDEV) += iec61883.o
OBJS-$(CONFIG_JACK_INDEV) += jack.o timefilter.o
+OBJS-$(CONFIG_FREENECT2_INDEV) += freenect2_dec.o freenect2_dec_c.o
OBJS-$(CONFIG_KMSGRAB_INDEV) += kmsgrab.o
OBJS-$(CONFIG_LAVFI_INDEV) += lavfi.o
OBJS-$(CONFIG_OPENAL_INDEV) += openal-dec.o
@@ -35,6 +35,7 @@ extern AVOutputFormat ff_decklink_muxer;
extern AVInputFormat ff_dshow_demuxer;
extern AVInputFormat ff_fbdev_demuxer;
extern AVOutputFormat ff_fbdev_muxer;
+extern AVInputFormat ff_freenect2_demuxer;
extern AVInputFormat ff_gdigrab_demuxer;
extern AVInputFormat ff_iec61883_demuxer;
extern AVInputFormat ff_jack_demuxer;
new file mode 100644
@@ -0,0 +1,414 @@
+/*
+ * Copyright (c) 2019 Michael Zucchi <notzed@gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+/**
+ * @file
+ * Freenect2 input device.
+ * The c++ integration is based on decklink_dec
+ */
+
+#include <libfreenect2/libfreenect2.hpp>
+#include <libfreenect2/frame_listener_impl.h>
+#include <libfreenect2/logger.h>
+
+extern "C" {
+#include "libavutil/log.h"
+#include "libavutil/mem.h"
+#include "libavutil/time.h"
+#include "libavutil/pixdesc.h"
+#include "libavutil/bswap.h"
+#include "avdevice.h"
+}
+#include "freenect2_dec.h"
+
+typedef struct freenect2_ctx freenect2_ctx;
+
+/**
+ * Custom logger.
+ * libfreenect2's info is very noisy to it is mapped to debug level.
+ */
+class FN2Logger: public libfreenect2::Logger {
+private:
+ void *ctx;
+
+ int mapLevel(Level level) {
+ switch (level) {
+ case None:
+ return AV_LOG_QUIET;
+ case Error: // 1->16
+ return AV_LOG_ERROR;
+ case Warning: // 2->24
+ return AV_LOG_WARNING;
+ case Info: // 3->32
+ case Debug:
+ default:
+ return AV_LOG_DEBUG;
+ }
+ }
+public:
+ FN2Logger(void *ctx) {
+ this->ctx = ctx;
+ }
+
+ virtual void log(Level level, const std::string &message) {
+ av_log(ctx, mapLevel(level), "%s\n", message.c_str());
+ }
+};
+
+static int create_pipeline(AVFormatContext *avctx) {
+ freenect2_ctx *ktx = (freenect2_ctx *)avctx->priv_data;
+
+ switch (ktx->pipelineid) {
+ case FN2_ANY:
+#if defined(LIBFREENECT2_WITH_OPENGL_SUPPORT)
+ ktx->pipeline = new libfreenect2::OpenGLPacketPipeline(0, false, true);
+#endif
+#if defined(LIBFREENECT2_WITH_OPENCL_SUPPORT)
+ if (!ktx->pipeline)
+ ktx->pipeline = new libfreenect2::OpenCLPacketPipeline(ktx->deviceid, true);
+#endif
+#if defined(LIBFREENECT2_WITH_CUDA_SUPPORT)
+ if (!ktx->pipeline)
+ ktx->pipeline = new libfreenect2::CudaPacketPipeline(ktx->deviceid, true);
+#endif
+ if (!ktx->pipeline)
+ ktx->pipeline = new libfreenect2::CpuPacketPipeline(true);
+ break;
+
+ case FN2_CPU:
+ ktx->pipeline = new libfreenect2::CpuPacketPipeline(true);
+ break;
+
+ case FN2_OPENGL:
+#if defined(LIBFREENECT2_WITH_OPENGL_SUPPORT)
+ ktx->pipeline = new libfreenect2::OpenGLPacketPipeline(0, false, true);
+#else
+ av_log(avctx, AV_LOG_ERROR, "OpenGL Pipeline not supported");
+ return AVERROR(EINVAL);
+#endif
+
+ case FN2_OPENCL:
+#if defined(LIBFREENECT2_WITH_OPENCL_SUPPORT)
+ ktx->pipeline = new libfreenect2::OpenCLPacketPipeline(ktx->deviceid, true);
+#else
+ av_log(avctx, AV_LOG_ERROR, "OpenCL Pipeline not supported");
+ return AVERROR(EINVAL);
+#endif
+
+ case FN2_CUDA:
+#if defined(LIBFREENECT2_WITH_CUDA_SUPPORT)
+ ktx->pipeline = new libfreenect2::CudaPacketPipeline(ktx->deviceid, true);
+#else
+ av_log(avctx, AV_LOG_ERROR, "CUDA Pipeline not supported");
+ return AVERROR(EINVAL);
+#endif
+ }
+ if (!ktx->pipeline)
+ return AVERROR(ENOMEM);
+
+ return 0;
+}
+
+int av_cold ff_freenect2_read_header(AVFormatContext *avctx)
+{
+ freenect2_ctx *ktx = (freenect2_ctx *)avctx->priv_data;
+ AVStream *st = NULL;
+ int res;
+
+ libfreenect2::setGlobalLogger(new FN2Logger(avctx));
+
+ av_log(avctx, AV_LOG_DEBUG, "open device '%s' capture=%d pipeline=%d\n", avctx->url, ktx->capture, ktx->pipelineid);
+
+ ktx->fn = new libfreenect2::Freenect2();
+ if (!ktx->fn)
+ return AVERROR(ENOMEM);
+
+ res = create_pipeline(avctx);
+ if (res != 0)
+ goto fail;
+
+ // Open default, by index, or by serial no.
+ if (!avctx->url[0])
+ ktx->dev = ktx->fn->openDevice(ktx->fn->getDefaultDeviceSerialNumber(), ktx->pipeline);
+ else if (strlen(avctx->url) < 4)
+ ktx->dev = ktx->fn->openDevice(atoi(avctx->url), ktx->pipeline);
+ else {
+ std::string ser(avctx->url);
+ ktx->dev = ktx->fn->openDevice(ser, ktx->pipeline);
+ }
+ if (!ktx->dev) {
+ res = AVERROR(ENOENT);
+ goto fail;
+ }
+
+ // Ideally(?) could use an asynchronous listener, this makes it simple
+ ktx->listener = new libfreenect2::SyncMultiFrameListener(ktx->capture);
+ if (!ktx->listener) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+ ktx->dev->setColorFrameListener(ktx->listener);
+ ktx->dev->setIrAndDepthFrameListener(ktx->listener);
+
+ ktx->frames = new libfreenect2::FrameMap();
+ if (!ktx->frames) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+
+ ktx->state = FN2_READY;
+
+ // RGB video
+ if (ktx->capture & libfreenect2::Frame::Color) {
+ ktx->rgb_index = avctx->nb_streams;
+ st = avformat_new_stream(avctx, NULL);
+ if (!st) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+ st->id = 0;
+ st->codecpar->format = AV_PIX_FMT_NONE;
+ st->codecpar->width = 1920;
+ st->codecpar->height = 1080;
+ st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+ st->codecpar->codec_id = AV_CODEC_ID_MJPEG;
+ st->codecpar->bit_rate = 8 * 1920 * 1080;
+ st->avg_frame_rate = (AVRational){30, 1};
+ st->r_frame_rate = st->avg_frame_rate;
+ st->time_base = (AVRational){125, 1000000};
+ }
+
+ // IR video
+ if (ktx->capture & libfreenect2::Frame::Ir) {
+ ktx->ir_index = avctx->nb_streams;
+ st = avformat_new_stream(avctx, NULL);
+ if (!st) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+ st->id = 1;
+ st->codecpar->format = AV_PIX_FMT_GRAY16LE;
+ st->codecpar->width = 512;
+ st->codecpar->height = 424;
+ st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+ st->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
+ st->codecpar->codec_tag = avcodec_pix_fmt_to_codec_tag((AVPixelFormat)st->codecpar->format);
+ st->codecpar->bit_rate = 16 * 512 * 424;
+ st->avg_frame_rate = (AVRational){30, 1};
+ st->r_frame_rate = st->avg_frame_rate;
+ st->time_base = (AVRational){125, 1000000};
+ }
+
+ // Depth video
+ if (ktx->capture & libfreenect2::Frame::Depth) {
+ ktx->depth_index = avctx->nb_streams;
+ st = avformat_new_stream(avctx, NULL);
+ if (!st) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+ st->id = 2;
+ st->codecpar->format = AV_PIX_FMT_GRAY16LE;
+ st->codecpar->width = 512;
+ st->codecpar->height = 424;
+ st->codecpar->codec_type = AVMEDIA_TYPE_VIDEO;
+ st->codecpar->codec_id = AV_CODEC_ID_RAWVIDEO;
+ st->codecpar->codec_tag = avcodec_pix_fmt_to_codec_tag((AVPixelFormat)st->codecpar->format);
+ st->codecpar->bit_rate = 16 * 512 * 424;
+ st->avg_frame_rate = (AVRational){30, 1};
+ st->r_frame_rate = st->avg_frame_rate;
+ st->time_base = (AVRational){125, 1000000};
+ }
+
+ return 0;
+
+ fail:
+ delete(ktx->frames);
+ delete(ktx->listener);
+ delete(ktx->dev);
+ delete(ktx->fn);
+
+ return res;
+}
+
+int ff_freenect2_read_packet(AVFormatContext *avctx, AVPacket *pkt)
+{
+ freenect2_ctx *ktx = (freenect2_ctx *)avctx->priv_data;
+ int res;
+ libfreenect2::Frame *frame;
+ float *fdata;
+ unsigned short *sdata;
+
+ if (ktx->state < FN2_START) {
+ if (!ktx->dev->startStreams
+ ((ktx->capture & libfreenect2::Frame::Color) != 0,
+ (ktx->capture & (libfreenect2::Frame::Ir | libfreenect2::Frame::Depth)) != 0))
+ return AVERROR(EIO);
+ ktx->state = FN2_START;
+
+ // Settings must be applied after start
+ if (ktx->capture & libfreenect2::Frame::Color) {
+ if (ktx->auto_exposure != 0.0f) {
+ ktx->dev->setColorAutoExposure(ktx->auto_exposure);
+ } else if (ktx->pseudo_exposure != 0.0f) {
+ ktx->dev->setColorSemiAutoExposure(ktx->pseudo_exposure);
+ } else if (ktx->manual_gain != 1.0f || ktx->manual_exposure != 0.0f) {
+ ktx->dev->setColorManualExposure(ktx->manual_exposure, ktx->manual_gain);
+ }
+ }
+ }
+
+ // Wait for all frames to be ready
+ if (ktx->state == FN2_START) {
+ if (!ktx->listener->waitForNewFrame(*ktx->frames, 3*1000))
+ return 0;
+
+ if (ktx->capture & libfreenect2::Frame::Color)
+ ktx->state = FN2_RGB;
+ else if (ktx->capture & libfreenect2::Frame::Ir)
+ ktx->state = FN2_IR;
+ else
+ ktx->state = FN2_DEPTH;
+ }
+
+ switch (ktx->state) {
+ case FN2_RGB:
+ // RGB Packets are in Format::Raw and JPEG format.
+ frame = (*ktx->frames)[libfreenect2::Frame::Color];
+ if ((res = av_new_packet(pkt, frame->bytes_per_pixel)))
+ return res;
+
+ memcpy(pkt->data, frame->data, frame->bytes_per_pixel);
+
+ pkt->stream_index = ktx->rgb_index;
+ pkt->pts = frame->timestamp;
+
+ if (ktx->capture & libfreenect2::Frame::Ir)
+ ktx->state = FN2_IR;
+ else if (ktx->capture & libfreenect2::Frame::Depth)
+ ktx->state = FN2_DEPTH;
+ else {
+ ktx->listener->release(*ktx->frames);
+ ktx->state = FN2_START;
+ }
+ break;
+ case FN2_IR:
+ // IR Packets are Format::Float, 0.0 - 65535.0
+ frame = (*ktx->frames)[libfreenect2::Frame::Ir];
+ if ((res = av_new_packet(pkt, frame->width * frame->height * 2)))
+ return res;
+
+ fdata = (float *)frame->data;
+ sdata = (unsigned short *)pkt->data;
+ for (unsigned int i=0;i<frame->width * frame->height;i++)
+ sdata[i] = (unsigned short)fdata[i];
+
+ pkt->stream_index = ktx->ir_index;
+ pkt->pts = frame->timestamp;
+
+ if (ktx->capture & libfreenect2::Frame::Depth)
+ ktx->state = FN2_DEPTH;
+ else {
+ ktx->listener->release(*ktx->frames);
+ ktx->state = FN2_START;
+ }
+ break;
+ case FN2_DEPTH:
+ // Depth Packets are Format::Float, in mm. Convert to 0.1mm units.
+ frame = (*ktx->frames)[libfreenect2::Frame::Depth];
+ if ((res = av_new_packet(pkt, frame->width * frame->height * 2)))
+ return res;
+
+ fdata = (float *)frame->data;
+ sdata = (unsigned short *)pkt->data;
+ for (unsigned int i=0;i<frame->width * frame->height;i++)
+ sdata[i] = (unsigned short)(10.0f * fdata[i]);
+
+ pkt->stream_index = ktx->depth_index;
+ pkt->pts = frame->timestamp;
+
+ ktx->listener->release(*ktx->frames);
+ ktx->state = FN2_START;
+ break;
+ }
+
+ return 0;
+}
+
+int av_cold ff_freenect2_read_close(AVFormatContext *avctx)
+{
+ freenect2_ctx *ktx = (freenect2_ctx *)avctx->priv_data;
+
+ ktx->dev->stop();
+ ktx->dev->close();
+
+ if (ktx->state > FN2_START)
+ ktx->listener->release(*ktx->frames);
+
+ delete(ktx->frames);
+ delete(ktx->listener);
+ delete(ktx->dev);
+ delete(ktx->fn);
+
+ return 0;
+}
+
+int ff_freenect2_get_device_list(AVFormatContext *s, AVDeviceInfoList *device_list)
+{
+ FN2Logger logger(s);
+ libfreenect2::setGlobalLogger(&logger);
+
+ libfreenect2::Freenect2 fn;
+ int len = fn.enumerateDevices();
+ std::string def = fn.getDefaultDeviceSerialNumber();
+ int res = 0;
+ AVDeviceInfo *device = NULL;
+
+ for (int i=0;i<len;i++) {
+ std::string ser = fn.getDeviceSerialNumber(i);
+
+ device = (AVDeviceInfo *)av_mallocz(sizeof(*device));
+ if (!device) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+
+ device->device_name = av_strdup(ser.c_str());
+ device->device_description = av_strdup("libfreenect2 Device");
+ if (!device->device_name || !device->device_description) {
+ res = AVERROR(ENOMEM);
+ goto fail;
+ }
+
+ if ((res = av_dynarray_add_nofree(&device_list->devices, &device_list->nb_devices, device)) < 0)
+ goto fail;
+
+ if (ser == def)
+ device_list->default_device = device_list->nb_devices - 1;
+ }
+ device = NULL;
+ fail:
+ if (device) {
+ av_free(device->device_name);
+ av_free(device->device_description);
+ av_free(device);
+ }
+ return res;
+}
new file mode 100644
@@ -0,0 +1,80 @@
+/*
+ * Copyright (c) 2019 Michael Zucchi <notzed@gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
#ifndef AVDEVICE_FREENECT2_DEC_H
#define AVDEVICE_FREENECT2_DEC_H

/*
 * Shared context between the C option/registration side (freenect2_dec_c.c)
 * and the C++ implementation (freenect2_dec.cpp).  The libfreenect2 object
 * pointers are declared with their real class types when compiled as C++,
 * and as opaque struct pointers when compiled as C.
 */
#ifdef __cplusplus
extern "C" {
#define FNSTRUCT libfreenect2::
#else
#define FNSTRUCT struct
#endif

/* Demuxer callbacks implemented in freenect2_dec.cpp. */
int ff_freenect2_read_header(AVFormatContext *avctx);
int ff_freenect2_read_packet(AVFormatContext *avctx, AVPacket *pkt);
int ff_freenect2_read_close(AVFormatContext *avctx);
int ff_freenect2_get_device_list(AVFormatContext *s, struct AVDeviceInfoList *device_list);

/* Capture state machine: READY = streams not yet started, START = waiting
 * for the next frame set, RGB/IR/DEPTH = that frame is the next packet to
 * deliver from the currently-held frame set. */
enum freenect2_state {
    FN2_READY = 1,
    FN2_START,
    FN2_RGB,
    FN2_IR,
    FN2_DEPTH
};

/* Runtime selection of the libfreenect2 decode pipeline ("pipeline" option). */
enum freenect2_pipeline {
    FN2_ANY,
    FN2_CPU,
    FN2_OPENGL,
    FN2_OPENCL,
    FN2_CUDA
};

struct freenect2_ctx {
    // AVClass for the private options; must be the first member.
    const AVClass *cclass;

    enum freenect2_state state;
    // Stream indices assigned in read_header for each enabled capture.
    int rgb_index, ir_index, depth_index;

    // libfreenect2::Frame::Type bitset of frames to capture, 1, 2, 4
    int capture;
    // Basic camera settings.
    float auto_exposure;
    float pseudo_exposure;
    float manual_exposure, manual_gain;
    // processing pipeline to use
    enum freenect2_pipeline pipelineid;
    // gpu id
    int deviceid;

    /* libfreenect2 objects, owned by read_header/read_close. */
    FNSTRUCT Freenect2 *fn;
    FNSTRUCT PacketPipeline *pipeline;
    FNSTRUCT Freenect2Device *dev;
    FNSTRUCT SyncMultiFrameListener *listener;
    FNSTRUCT FrameMap *frames;
};

#ifdef __cplusplus
} /* extern "C" */
#endif

#endif /* AVDEVICE_FREENECT2_DEC_H */
new file mode 100644
@@ -0,0 +1,75 @@
+/*
+ * Copyright (c) 2019 Michael Zucchi <notzed@gmail.com>
+ *
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+/**
+ * @file
+ * kinect2 driver setup and config
+ */
+
+#include "libavformat/avformat.h"
+#include "libavutil/opt.h"
+#include "freenect2_dec.h"
+
+#define OFF(x) offsetof(struct freenect2_ctx, x)
+#define DEC AV_OPT_FLAG_DECODING_PARAM
+
/* Private options for the freenect2 input device; offsets refer to
 * struct freenect2_ctx (see freenect2_dec.h). */
static const AVOption options[] = {
    /* Bit-set of streams to capture: 1=rgb, 2=ir, 4=depth; default all. */
    { "capture", "Streams to capture", OFF(capture), AV_OPT_TYPE_INT, {.i64 = 7 }, 1, 7, DEC, "capture" },
    /* Decode pipeline; FN2_ANY picks the best available at runtime. */
    { "pipeline", "Processing Pipeline", OFF(pipelineid), AV_OPT_TYPE_INT, {.i64 = FN2_ANY }, FN2_ANY, FN2_CUDA, DEC, "pipeline" },

    /* GPU device index, passed through to OpenCL/CUDA pipelines. */
    { "device", "GPU Device ID", OFF(deviceid), AV_OPT_TYPE_INT, {.i64 = 0 }, 0.0, 128.0, DEC },

    /* Colour-sensor exposure controls; applied after streams start.
     * auto/pseudo/manual are mutually exclusive, tried in that order. */
    { "auto_exposure", "", OFF(auto_exposure), AV_OPT_TYPE_FLOAT, {.dbl = 0.0 }, -2.0, 2.0, DEC },
    { "pseudo_exposure", "", OFF(pseudo_exposure), AV_OPT_TYPE_FLOAT, {.dbl = 0.0 }, 0.0, 640.0, DEC },
    { "manual_exposure", "", OFF(manual_exposure), AV_OPT_TYPE_FLOAT, {.dbl = 0.0 }, 0.0, 66.0, DEC },
    { "manual_gain", "", OFF(manual_gain), AV_OPT_TYPE_FLOAT, {.dbl = 1.0 }, 1.0, 4.0, DEC },

    /* Named constants for the "capture" bit-set. */
    { "rgb", "", 0, AV_OPT_TYPE_CONST, {.i64 = 1}, 0, 0, DEC, "capture" },
    { "ir", "", 0, AV_OPT_TYPE_CONST, {.i64 = 2}, 0, 0, DEC, "capture" },
    { "depth", "", 0, AV_OPT_TYPE_CONST, {.i64 = 4}, 0, 0, DEC, "capture" },

    /* Named constants for the "pipeline" selector. */
    { "auto", "Automatic", 0, AV_OPT_TYPE_CONST, {.i64 = FN2_ANY}, 0, 0, DEC, "pipeline" },
    { "cpu", "CPU", 0, AV_OPT_TYPE_CONST, {.i64 = FN2_CPU}, 0, 0, DEC, "pipeline" },
    { "opengl", "OpenGL", 0, AV_OPT_TYPE_CONST, {.i64 = FN2_OPENGL}, 0, 0, DEC, "pipeline" },
    { "opencl", "OpenCL", 0, AV_OPT_TYPE_CONST, {.i64 = FN2_OPENCL}, 0, 0, DEC, "pipeline" },
    { "cuda", "CUDA", 0, AV_OPT_TYPE_CONST, {.i64 = FN2_CUDA}, 0, 0, DEC, "pipeline" },

    { NULL },
};
+
/* AVClass tying the options above to freenect2_ctx.cclass. */
static const AVClass freenect2_class = {
    .class_name = "freenect2 indev",
    .item_name = av_default_item_name,
    .option = options,
    .version = LIBAVUTIL_VERSION_INT,
    .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_INPUT,
};
+
/* Input device definition; registered via alldevices.c.  AVFMT_NOFILE:
 * the "filename" is a device index/serial, not a file to open. */
AVInputFormat ff_freenect2_demuxer = {
    .name = "freenect2",
    .long_name = NULL_IF_CONFIG_SMALL("Kinect V2 with libfreenect2"),
    .priv_data_size = sizeof(struct freenect2_ctx),
    .read_header = ff_freenect2_read_header,
    .read_packet = ff_freenect2_read_packet,
    .read_close = ff_freenect2_read_close,
    .get_device_list = ff_freenect2_get_device_list,
    .flags = AVFMT_NOFILE,
    .priv_class = &freenect2_class,
};
@@ -28,7 +28,7 @@
#include "libavutil/version.h"
#define LIBAVDEVICE_VERSION_MAJOR 58
-#define LIBAVDEVICE_VERSION_MINOR 7
+#define LIBAVDEVICE_VERSION_MINOR 8
#define LIBAVDEVICE_VERSION_MICRO 100
#define LIBAVDEVICE_VERSION_INT AV_VERSION_INT(LIBAVDEVICE_VERSION_MAJOR, \
--
2.7.4