[FFmpeg-devel] [PATCH 1/6] libavutil: add support for AV_HWDEVICE_TYPE_D3D11VA
Steve Lhomme
robux4 at gmail.com
Wed Jan 4 15:52:38 EET 2017
---
doc/APIchanges | 3 +
libavutil/Makefile | 3 +
libavutil/hwcontext.c | 3 +
libavutil/hwcontext.h | 1 +
libavutil/hwcontext_d3d11va.c | 461 +++++++++++++++++++++++++++++++++++++++++
libavutil/hwcontext_d3d11va.h | 70 +++++++
libavutil/hwcontext_internal.h | 1 +
libavutil/version.h | 2 +-
8 files changed, 543 insertions(+), 1 deletion(-)
create mode 100644 libavutil/hwcontext_d3d11va.c
create mode 100644 libavutil/hwcontext_d3d11va.h
diff --git a/doc/APIchanges b/doc/APIchanges
index fbeae7a..76d1c8e 100644
--- a/doc/APIchanges
+++ b/doc/APIchanges
@@ -15,6 +15,9 @@ libavutil: 2015-08-28
API changes, most recent first:
++2017-xx-xx - xxxxxxx - lavu 55.43.101 - hwcontext.h
++ Add AV_HWDEVICE_TYPE_D3D11VA to decode using Direct3D11.
++
2016-12-10 - xxxxxxx - lavu xx.xx.100- imgutils.h
Add av_image_check_size2()
diff --git a/libavutil/Makefile b/libavutil/Makefile
index 15d95de..376a11b 100644
--- a/libavutil/Makefile
+++ b/libavutil/Makefile
@@ -34,6 +34,7 @@ HEADERS = adler32.h \
hmac.h \
hwcontext.h \
hwcontext_cuda.h \
+ hwcontext_d3d11va.h \
hwcontext_dxva2.h \
hwcontext_qsv.h \
hwcontext_vaapi.h \
@@ -156,6 +157,7 @@ OBJS = adler32.o \
OBJS-$(!HAVE_ATOMICS_NATIVE) += atomic.o \
OBJS-$(CONFIG_CUDA) += hwcontext_cuda.o
+OBJS-$(CONFIG_D3D11VA) += hwcontext_d3d11va.o
OBJS-$(CONFIG_DXVA2) += hwcontext_dxva2.o
OBJS-$(CONFIG_QSV) += hwcontext_qsv.o
OBJS-$(CONFIG_LZO) += lzo.o
@@ -170,6 +172,7 @@ SLIBOBJS-$(HAVE_GNU_WINDRES) += avutilres.o
SKIPHEADERS-$(HAVE_CUDA_H) += hwcontext_cuda.h
SKIPHEADERS-$(CONFIG_CUDA) += hwcontext_cuda_internal.h
+SKIPHEADERS-$(CONFIG_D3D11VA) += hwcontext_d3d11va.h
SKIPHEADERS-$(CONFIG_DXVA2) += hwcontext_dxva2.h
SKIPHEADERS-$(CONFIG_QSV) += hwcontext_qsv.h
SKIPHEADERS-$(CONFIG_VAAPI) += hwcontext_vaapi.h
diff --git a/libavutil/hwcontext.c b/libavutil/hwcontext.c
index 615f1f7..a9db84b 100644
--- a/libavutil/hwcontext.c
+++ b/libavutil/hwcontext.c
@@ -32,6 +32,9 @@ static const HWContextType *hw_table[] = {
#if CONFIG_CUDA
&ff_hwcontext_type_cuda,
#endif
+#if CONFIG_D3D11VA
+ &ff_hwcontext_type_d3d11va,
+#endif
#if CONFIG_DXVA2
&ff_hwcontext_type_dxva2,
#endif
diff --git a/libavutil/hwcontext.h b/libavutil/hwcontext.h
index 785da09..1325917 100644
--- a/libavutil/hwcontext.h
+++ b/libavutil/hwcontext.h
@@ -30,6 +30,7 @@ enum AVHWDeviceType {
AV_HWDEVICE_TYPE_VAAPI,
AV_HWDEVICE_TYPE_DXVA2,
AV_HWDEVICE_TYPE_QSV,
+ AV_HWDEVICE_TYPE_D3D11VA,
};
typedef struct AVHWDeviceInternal AVHWDeviceInternal;
diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
new file mode 100644
index 0000000..3d9a34a
--- /dev/null
+++ b/libavutil/hwcontext_d3d11va.c
@@ -0,0 +1,461 @@
+/*
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+/* _WIN32_WINNT and COBJMACROS must be defined before ANY Windows header is
+ * included, otherwise they have no effect on the declarations pulled in by
+ * windows.h / d3d11.h (Vista-level APIs, C-style COM call macros). */
+#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600
+#undef _WIN32_WINNT
+#define _WIN32_WINNT 0x0600
+#endif
+#define COBJMACROS
+
+#include <windows.h>
+
+#include <initguid.h>
+#include <d3d11.h>
+#include <dxgi1_2.h>
+
+#include "avassert.h"
+#include "common.h"
+#include "hwcontext.h"
+#include "hwcontext_d3d11va.h"
+#include "hwcontext_internal.h"
+#include "imgutils.h"
+#include "pixdesc.h"
+#include "pixfmt.h"
+#include "compat/w32dlfcn.h"
+
+/* Loader signature for dxgi.dll's CreateDXGIFactory(). */
+typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);
+
+/* Private per-frames-context state (AVHWFramesContext.internal->priv). */
+typedef struct D3D11VAFramesContext {
+ /* decoder output views backing the internal pool; array freed on uninit */
+ ID3D11VideoDecoderOutputView **surfaces_internal;
+ /* number of entries of surfaces_internal already handed to the pool */
+ int nb_surfaces_used;
+
+ /* immediate device context, used for the copies in transfer_data */
+ ID3D11DeviceContext *d3d11_context;
+
+ /* DXGI texture format corresponding to ctx->sw_format */
+ DXGI_FORMAT format;
+
+ /* single-slice CPU-accessible texture used for upload/download */
+ ID3D11Texture2D *staging_texture;
+} D3D11VAFramesContext;
+
+/* Private per-device-context state (AVHWDeviceContext.user_opaque). */
+typedef struct D3D11VADevicePriv {
+ HMODULE d3dlib;
+} D3D11VADevicePriv;
+
+/* DXGI texture formats and the lavu pixel formats whose data they carry. */
+static const struct {
+ DXGI_FORMAT d3d_format;
+ enum AVPixelFormat pix_fmt;
+} supported_formats[] = {
+ { DXGI_FORMAT_NV12, AV_PIX_FMT_NV12 },
+ { DXGI_FORMAT_P010, AV_PIX_FMT_P010 },
+};
+
+/* Tear down everything d3d11va_init_pool() created for this frames context. */
+static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
+{
+ AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
+ D3D11VAFramesContext *s = ctx->internal->priv;
+
+ /* NOTE(review): only the array is freed here; individual views are
+ * released by free_surface() when their pool buffers die. Views that
+ * were never wrapped in a buffer appear to leak — confirm. */
+ av_freep(&s->surfaces_internal);
+
+ if (frames_hwctx->video_decoder)
+ ID3D11VideoDecoder_Release(frames_hwctx->video_decoder);
+
+ if (s->staging_texture)
+ ID3D11Texture2D_Release(s->staging_texture);
+
+ if (s->d3d11_context) {
+ ID3D11DeviceContext_Release(s->d3d11_context);
+ s->d3d11_context = NULL;
+ }
+}
+
+/* AVBuffer free callback: drop the reference on the wrapped output view. */
+static void free_surface(void *opaque, uint8_t *data)
+{
+    ID3D11VideoDecoderOutputView *view = (ID3D11VideoDecoderOutputView*)data;
+
+    ID3D11VideoDecoderOutputView_Release(view);
+}
+
+/* Pool allocator: hand out the next unused pre-created output view, wrapped
+ * in an AVBufferRef whose data pointer is the view itself. Returns NULL once
+ * all pre-allocated surfaces are in use. */
+static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
+{
+    AVHWFramesContext            *ctx   = (AVHWFramesContext*)opaque;
+    D3D11VAFramesContext         *s     = ctx->internal->priv;
+    AVD3D11VAFramesContext       *hwctx = ctx->hwctx;
+    ID3D11VideoDecoderOutputView *view;
+
+    if (s->nb_surfaces_used >= hwctx->nb_surfaces)
+        return NULL;
+
+    view = s->surfaces_internal[s->nb_surfaces_used++];
+
+    return av_buffer_create((uint8_t*)view, sizeof(*hwctx->surfaces),
+                            free_surface, 0, 0);
+}
+
+/* Create the internal surface pool: one texture array with
+ * ctx->initial_pool_size slices, one decoder output view per slice, plus a
+ * single-slice staging texture for CPU transfers.
+ * Requires frames_hwctx->video_decoder to be set by the caller.
+ * Returns 0 on success, a negative AVERROR code on failure. */
+static int d3d11va_init_pool(AVHWFramesContext *ctx)
+{
+ AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
+ AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
+ D3D11VAFramesContext *s = ctx->internal->priv;
+
+ int i;
+ HRESULT hr;
+ D3D11_TEXTURE2D_DESC texDesc = {0};
+ ID3D11Texture2D *p_texture;
+ D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc = {0};
+ D3D11_VIDEO_DECODER_DESC decoderDesc;
+ D3D11_VIDEO_DECODER_CONFIG decoderConfig;
+ ID3D11Device *d3d11_device;
+
+ /* only fixed-size pools are supported; nothing to do otherwise */
+ if (ctx->initial_pool_size <= 0)
+ return 0;
+
+ /* keep the immediate device context for the copies in transfer_data();
+ * released in d3d11va_frames_uninit() */
+ hr = ID3D11VideoContext_QueryInterface(device_hwctx->video_context, &IID_ID3D11DeviceContext,
+ (void **)&s->d3d11_context);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "Failed to get the device context %lx\n", hr);
+ return AVERROR_UNKNOWN;
+ }
+
+ /* map the sw pixel format to the DXGI format of the texture array */
+ for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
+ if (ctx->sw_format == supported_formats[i].pix_fmt) {
+ s->format = supported_formats[i].d3d_format;
+ break;
+ }
+ }
+ if (i == FF_ARRAY_ELEMS(supported_formats)) {
+ av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
+ av_get_pix_fmt_name(ctx->sw_format));
+ return AVERROR(EINVAL);
+ }
+
+ s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
+ sizeof(*s->surfaces_internal));
+ if (!s->surfaces_internal)
+ return AVERROR(ENOMEM);
+
+ /* one texture array, one slice per pooled surface */
+ texDesc.Width = ctx->width;
+ texDesc.Height = ctx->height;
+ texDesc.MipLevels = 1;
+ texDesc.Format = s->format;
+ texDesc.SampleDesc.Count = 1;
+ texDesc.ArraySize = ctx->initial_pool_size;
+ texDesc.Usage = D3D11_USAGE_DEFAULT;
+ texDesc.BindFlags = D3D11_BIND_DECODER;
+
+ hr = ID3D11VideoDevice_QueryInterface(device_hwctx->video_device, &IID_ID3D11Device, (void **)&d3d11_device);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "Failed to get the device %lx\n", hr);
+ return AVERROR_UNKNOWN;
+ }
+
+ hr = ID3D11Device_CreateTexture2D(d3d11_device, &texDesc, NULL, &p_texture);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "Could not create the texture %lx\n", hr);
+ ID3D11Device_Release(d3d11_device);
+ return AVERROR_UNKNOWN;
+ }
+
+ /* the views must be created with the decoder's profile GUID */
+ hr = ID3D11VideoDecoder_GetCreationParameters(frames_hwctx->video_decoder, &decoderDesc, &decoderConfig);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "Could not get the decoder config %lx\n", hr);
+ ID3D11Texture2D_Release(p_texture);
+ ID3D11Device_Release(d3d11_device);
+ return AVERROR_UNKNOWN;
+ }
+
+ viewDesc.DecodeProfile = decoderDesc.Guid;
+ viewDesc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D;
+ for (i=0; i<ctx->initial_pool_size; i++)
+ {
+ hr = ID3D11VideoDevice_CreateVideoDecoderOutputView(device_hwctx->video_device,
+ (ID3D11Resource*) p_texture,
+ &viewDesc,
+ (ID3D11VideoDecoderOutputView**) &s->surfaces_internal[i]);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "Could not create the decoder output %d\n", i);
+ /* unwind the views created so far */
+ while (--i >= 0) {
+ ID3D11VideoDecoderOutputView_Release(s->surfaces_internal[i]);
+ s->surfaces_internal[i] = NULL;
+ }
+ ID3D11Texture2D_Release(p_texture);
+ ID3D11Device_Release(d3d11_device);
+ return AVERROR_UNKNOWN;
+ }
+ }
+ /* the output views keep the texture alive; drop our local reference */
+ ID3D11Texture2D_Release(p_texture);
+
+ /* reuse texDesc for the single-slice CPU-accessible staging texture */
+ texDesc.ArraySize = 1;
+ texDesc.Usage = D3D11_USAGE_STAGING;
+ texDesc.BindFlags = 0;
+ texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;
+ hr = ID3D11Device_CreateTexture2D(d3d11_device, &texDesc, NULL, &s->staging_texture);
+ ID3D11Device_Release(d3d11_device);
+ if (FAILED(hr)) {
+ av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture %lx\n", hr);
+ return AVERROR_UNKNOWN;
+ }
+
+ ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(*s->surfaces_internal),
+ ctx, d3d11va_pool_alloc, NULL);
+ if (!ctx->internal->pool_internal)
+ return AVERROR(ENOMEM);
+
+ /* expose the pool through the public frames hwctx */
+ frames_hwctx->surfaces = s->surfaces_internal;
+ frames_hwctx->nb_surfaces = ctx->initial_pool_size;
+
+ return 0;
+}
+
+/* frames_init hook: build the internal fixed-size pool unless the caller
+ * supplied their own ctx->pool. */
+static int d3d11va_frames_init(AVHWFramesContext *ctx)
+{
+    int ret;
+
+    /* a caller-provided pool takes precedence; nothing to do then */
+    if (ctx->pool)
+        return 0;
+
+    ret = d3d11va_init_pool(ctx);
+    if (ret < 0)
+        av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
+
+    return ret;
+}
+
+/* Attach a pooled surface to frame; data[3] carries the
+ * ID3D11VideoDecoderOutputView pointer, as usual for hw pixel formats. */
+static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
+{
+    AVBufferRef *buf = av_buffer_pool_get(ctx->pool);
+
+    if (!buf)
+        return AVERROR(ENOMEM);
+
+    frame->buf[0]  = buf;
+    frame->data[3] = buf->data;
+    frame->format  = AV_PIX_FMT_D3D11VA_VLD;
+    frame->width   = ctx->width;
+    frame->height  = ctx->height;
+
+    return 0;
+}
+
+/* Report the sw formats usable for transfers in either direction: only the
+ * context's own sw_format is supported. */
+static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
+                                        enum AVHWFrameTransferDirection dir,
+                                        enum AVPixelFormat **formats)
+{
+    enum AVPixelFormat *pix_fmts = av_malloc_array(2, sizeof(*pix_fmts));
+
+    if (!pix_fmts)
+        return AVERROR(ENOMEM);
+
+    pix_fmts[0] = ctx->sw_format;
+    pix_fmts[1] = AV_PIX_FMT_NONE;
+    *formats    = pix_fmts;
+
+    return 0;
+}
+
+/* Copy frame data between a D3D11VA surface and system memory, through the
+ * CPU-accessible staging texture, holding the device mutex throughout.
+ *
+ * download != 0: GPU surface (src) -> sysmem frame (dst)
+ * download == 0: sysmem frame (src) -> GPU surface (dst)
+ *
+ * Returns 0 on success, a negative AVERROR code on failure. */
+static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
+                                 const AVFrame *src)
+{
+    ID3D11VideoDecoderOutputView *surface;
+    D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC surfaceDesc;
+    D3D11_TEXTURE2D_DESC dstDesc;
+    D3D11_MAPPED_SUBRESOURCE LockedRect;
+    ID3D11Resource *pTexture;
+    HRESULT hr;
+    AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
+    D3D11VAFramesContext   *s            = ctx->internal->priv;
+
+    uint8_t *surf_data[4]     = { NULL };
+    int      surf_linesize[4] = { 0 };
+    int i;
+
+    int download = !!src->hw_frames_ctx;
+
+    surface = (ID3D11VideoDecoderOutputView*)(download ? src->data[3] : dst->data[3]);
+
+    ID3D11VideoDecoderOutputView_GetDesc(surface, &surfaceDesc);
+    ID3D11VideoDecoderOutputView_GetResource(surface, &pTexture);
+    ID3D11Texture2D_GetDesc(s->staging_texture, &dstDesc);
+
+    WaitForSingleObjectEx(device_hwctx->dev_ctx_mutex, INFINITE, FALSE);
+
+    if (download) {
+        /* bring the current surface contents into the CPU-readable copy;
+         * pointless (and wasteful) when uploading */
+        ID3D11DeviceContext_CopySubresourceRegion(s->d3d11_context, (ID3D11Resource*)s->staging_texture,
+                                                  0, 0, 0, 0,
+                                                  pTexture, surfaceDesc.Texture2D.ArraySlice,
+                                                  NULL);
+    }
+
+    hr = ID3D11DeviceContext_Map(s->d3d11_context, (ID3D11Resource*)s->staging_texture,
+                                 0, download ? D3D11_MAP_READ : D3D11_MAP_WRITE, 0, &LockedRect);
+    if (FAILED(hr)) {
+        av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface %lx\n", hr);
+        ReleaseMutex(device_hwctx->dev_ctx_mutex);
+        ID3D11Resource_Release(pTexture);
+        return AVERROR_UNKNOWN;
+    }
+
+    /* all planes of the staging texture share one row pitch */
+    for (i = 0; download ? dst->data[i] : src->data[i]; i++)
+        surf_linesize[i] = LockedRect.RowPitch;
+
+    av_image_fill_pointers(surf_data, ctx->sw_format, dstDesc.Height,
+                           (uint8_t*)LockedRect.pData, surf_linesize);
+
+    if (download) {
+        av_image_copy(dst->data, dst->linesize, surf_data, surf_linesize,
+                      ctx->sw_format, src->width, src->height);
+    } else {
+        av_image_copy(surf_data, surf_linesize, src->data, src->linesize,
+                      ctx->sw_format, src->width, src->height);
+    }
+
+    ID3D11DeviceContext_Unmap(s->d3d11_context, (ID3D11Resource*)s->staging_texture, 0);
+
+    if (!download) {
+        /* push the freshly written staging contents to the GPU surface;
+         * without this the upload never reaches the video texture */
+        ID3D11DeviceContext_CopySubresourceRegion(s->d3d11_context, pTexture,
+                                                  surfaceDesc.Texture2D.ArraySlice, 0, 0, 0,
+                                                  (ID3D11Resource*)s->staging_texture, 0,
+                                                  NULL);
+    }
+
+    ReleaseMutex(device_hwctx->dev_ctx_mutex);
+    ID3D11Resource_Release(pTexture);
+
+    return 0;
+}
+
+/* Free callback for the device context: drop the D3D11 video interfaces,
+ * destroy the mutex and unload d3d11.dll. */
+static void d3d11va_device_free(AVHWDeviceContext *ctx)
+{
+    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
+    D3D11VADevicePriv      *priv         = ctx->user_opaque;
+
+    if (device_hwctx->video_device)
+        ID3D11VideoDevice_Release(device_hwctx->video_device);
+
+    if (device_hwctx->video_context)
+        ID3D11VideoContext_Release(device_hwctx->video_context);
+
+    /* CreateMutex() yields NULL on failure (and the hwctx starts zeroed),
+     * so NULL is the "not created" value here. CloseHandle(), not
+     * ReleaseMutex(), is what actually destroys the mutex object. */
+    if (device_hwctx->dev_ctx_mutex)
+        CloseHandle(device_hwctx->dev_ctx_mutex);
+
+    if (priv->d3dlib)
+        FreeLibrary(priv->d3dlib);
+
+    av_freep(&ctx->user_opaque);
+}
+
+/* Create a D3D11 device usable for video decoding and fill the hwctx with
+ * the video device/context interfaces and their protecting mutex.
+ *
+ * @param device optional decimal DXGI adapter index; when NULL the default
+ *               hardware adapter is used.
+ * Returns 0 on success, a negative AVERROR code on failure. */
+static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
+                                 AVDictionary *opts, int flags)
+{
+    AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
+    D3D11VADevicePriv      *priv;
+
+    HRESULT hr;
+    PFN_D3D11_CREATE_DEVICE createD3D;
+    IDXGIAdapter *pAdapter = NULL;
+    ID3D10Multithread *pMultithread;
+    ID3D11Device *d3d11_device;
+    ID3D11DeviceContext *d3d11_context;
+    UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
+    /* if the DirectX SDK is installed creationFlags |= D3D11_CREATE_DEVICE_DEBUG; */
+
+    if (device) {
+        PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
+        HMODULE dxgilib = dlopen("dxgi.dll", 0);
+        if (!dxgilib)
+            return AVERROR_UNKNOWN;
+
+        mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY) dlsym(dxgilib, "CreateDXGIFactory");
+        if (mCreateDXGIFactory) {
+            IDXGIFactory2 *pDXGIFactory;
+            hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
+            if (SUCCEEDED(hr)) {
+                int adapter = atoi(device); /* non-numeric input selects adapter 0 */
+                if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
+                    pAdapter = NULL;
+                IDXGIFactory2_Release(pDXGIFactory);
+            }
+        }
+        /* dlclose() pairs with the dlopen() above */
+        dlclose(dxgilib);
+    }
+
+    priv = av_mallocz(sizeof(*priv));
+    if (!priv) {
+        if (pAdapter)
+            IDXGIAdapter_Release(pAdapter);
+        return AVERROR(ENOMEM);
+    }
+
+    /* from here on, error paths are cleaned up through ctx->free */
+    ctx->user_opaque = priv;
+    ctx->free        = d3d11va_device_free;
+
+    priv->d3dlib = dlopen("d3d11.dll", 0);
+    if (!priv->d3dlib) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library\n");
+        if (pAdapter)
+            IDXGIAdapter_Release(pAdapter);
+        return AVERROR_UNKNOWN;
+    }
+
+    createD3D = (PFN_D3D11_CREATE_DEVICE)dlsym(priv->d3dlib, "D3D11CreateDevice");
+    if (!createD3D) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to locate D3D11CreateDevice\n");
+        if (pAdapter)
+            IDXGIAdapter_Release(pAdapter);
+        return AVERROR_UNKNOWN;
+    }
+
+    hr = createD3D(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
+                   D3D11_SDK_VERSION, &d3d11_device, NULL, &d3d11_context);
+    if (pAdapter)
+        IDXGIAdapter_Release(pAdapter);
+    if (FAILED(hr)) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device %lx\n", hr);
+        return AVERROR_UNKNOWN;
+    }
+
+    /* ask the device to serialize its own internal accesses */
+    hr = ID3D11Device_QueryInterface(d3d11_device, &IID_ID3D10Multithread, (void **)&pMultithread);
+    if (SUCCEEDED(hr)) {
+        ID3D10Multithread_SetMultithreadProtected(pMultithread, TRUE);
+        ID3D10Multithread_Release(pMultithread);
+    }
+
+    hr = ID3D11DeviceContext_QueryInterface(d3d11_context, &IID_ID3D11VideoContext,
+                                            (void **)&device_hwctx->video_context);
+    ID3D11DeviceContext_Release(d3d11_context);
+    if (FAILED(hr)) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to get the Video Context %lx\n", hr);
+        ID3D11Device_Release(d3d11_device);
+        return AVERROR_UNKNOWN;
+    }
+
+    hr = ID3D11Device_QueryInterface(d3d11_device, &IID_ID3D11VideoDevice,
+                                     (void **)&device_hwctx->video_device);
+    ID3D11Device_Release(d3d11_device);
+    if (FAILED(hr)) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to get the Video Device %lx\n", hr);
+        return AVERROR_UNKNOWN;
+    }
+
+    /* CreateMutex() returns NULL on failure, not INVALID_HANDLE_VALUE */
+    device_hwctx->dev_ctx_mutex = CreateMutex(NULL, 0, NULL);
+    if (!device_hwctx->dev_ctx_mutex) {
+        av_log(ctx, AV_LOG_ERROR, "Failed to get a mutex for the D3D11VA decoder\n");
+        return AVERROR(EINVAL);
+    }
+
+    return 0;
+}
+
+/* Backend descriptor registered in libavutil/hwcontext.c. */
+const HWContextType ff_hwcontext_type_d3d11va = {
+ .type = AV_HWDEVICE_TYPE_D3D11VA,
+ .name = "D3D11VA",
+
+ .device_hwctx_size = sizeof(AVD3D11VADeviceContext),
+ .frames_hwctx_size = sizeof(AVD3D11VAFramesContext),
+ .frames_priv_size = sizeof(D3D11VAFramesContext),
+
+ .device_create = d3d11va_device_create,
+ .frames_init = d3d11va_frames_init,
+ .frames_uninit = d3d11va_frames_uninit,
+ .frames_get_buffer = d3d11va_get_buffer,
+ .transfer_get_formats = d3d11va_transfer_get_formats,
+ /* one routine serves both directions; it dispatches on src->hw_frames_ctx */
+ .transfer_data_to = d3d11va_transfer_data,
+ .transfer_data_from = d3d11va_transfer_data,
+
+ .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11VA_VLD, AV_PIX_FMT_NONE },
+};
diff --git a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
new file mode 100644
index 0000000..31dad6d
--- /dev/null
+++ b/libavutil/hwcontext_d3d11va.h
@@ -0,0 +1,70 @@
+/*
+ * This file is part of FFmpeg.
+ *
+ * FFmpeg is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+ * FFmpeg is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with FFmpeg; if not, write to the Free Software
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+ */
+
+
+#ifndef AVUTIL_HWCONTEXT_D3D11VA_H
+#define AVUTIL_HWCONTEXT_D3D11VA_H
+
+/**
+ * @file
+ * An API-specific header for AV_HWDEVICE_TYPE_D3D11VA.
+ *
+ * Only fixed-size pools are supported.
+ *
+ * For user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
+ * with the data pointer set to a pointer to ID3D11VideoDecoderOutputView.
+ */
+
+#include <d3d11.h>
+
+/**
+ * This struct is allocated as AVHWDeviceContext.hwctx
+ */
+typedef struct AVD3D11VADeviceContext {
+ /* D3D11 video decoding device interface */
+ ID3D11VideoDevice *video_device;
+ /* D3D11 video context; accesses must be guarded by dev_ctx_mutex */
+ ID3D11VideoContext *video_context;
+
+ /**
+ * Mutex owned by this context to avoid accessing the video_context from
+ * multiple threads simultaneously.
+ */
+ HANDLE dev_ctx_mutex;
+} AVD3D11VADeviceContext;
+
+/**
+ * This struct is allocated as AVHWFramesContext.hwctx
+ */
+typedef struct AVD3D11VAFramesContext {
+ /**
+ * The surface pool. When an external pool is not provided by the caller,
+ * this will be managed (allocated and filled on init, freed on uninit) by
+ * libavutil.
+ * When it is provided the allocation/deallocation is up to the caller.
+ */
+ ID3D11VideoDecoderOutputView **surfaces;
+ /* number of entries in the surfaces array */
+ int nb_surfaces;
+
+ /**
+ * Video decoder created by the caller. It must be set before
+ * av_hwframe_ctx_init() is called. When decoding is done it will be
+ * released.
+ */
+ ID3D11VideoDecoder *video_decoder;
+} AVD3D11VAFramesContext;
+
+#endif /* AVUTIL_HWCONTEXT_D3D11VA_H */
diff --git a/libavutil/hwcontext_internal.h b/libavutil/hwcontext_internal.h
index 079e42b..aaa6286 100644
--- a/libavutil/hwcontext_internal.h
+++ b/libavutil/hwcontext_internal.h
@@ -100,6 +100,7 @@ struct AVHWFramesInternal {
};
extern const HWContextType ff_hwcontext_type_cuda;
+extern const HWContextType ff_hwcontext_type_d3d11va;
extern const HWContextType ff_hwcontext_type_dxva2;
extern const HWContextType ff_hwcontext_type_qsv;
extern const HWContextType ff_hwcontext_type_vaapi;
diff --git a/libavutil/version.h b/libavutil/version.h
index 9f8c4c2..6dac240 100644
--- a/libavutil/version.h
+++ b/libavutil/version.h
@@ -80,7 +80,7 @@
#define LIBAVUTIL_VERSION_MAJOR 55
#define LIBAVUTIL_VERSION_MINOR 43
-#define LIBAVUTIL_VERSION_MICRO 100
+#define LIBAVUTIL_VERSION_MICRO 101
#define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \
LIBAVUTIL_VERSION_MINOR, \
--
2.10.1.windows.1
More information about the ffmpeg-devel
mailing list