[FFmpeg-devel] [PATCH] ffmpeg: add ffmpeg_d3d11va
wm4
nfxjfg at googlemail.com
Wed Dec 14 12:44:13 EET 2016
On Wed, 14 Dec 2016 09:30:35 +0000
Stève Lhomme <robux4 at videolabs.io> wrote:
> On Tue, Dec 13, 2016 at 2:35 PM, wm4 <nfxjfg at googlemail.com> wrote:
> > On Tue, 13 Dec 2016 14:19:35 +0100
> > Steve Lhomme <robux4 at gmail.com> wrote:
> >
> > I'm pretty much against this, since it duplicates the profile selection
> > code all over again, even if it could be shared. (And this code should
> > be in libavcodec in the first place.) Someone adding such code would
> > have to refactor ffmpeg_d3d11va too.
>
> I'm not sure how this relates to the line above. d3d11va and dxva2 are
> definitely not the same thing. On mobile you do not have DXVA2 at all
> but D3D11(VA) is present. There must be a way to have one without the
> other.
Yeah, but selection of the decoder GUIDs is very similar between both
APIs. It can definitely be shared (we do this in mpv). There's
absolutely no reason to copy-paste that stuff from ffmpeg_dxva2.c into
ffmpeg_d3d11va.c.
As for how libavcodec could provide this part, see the Libav/avconv
vaapi support (none of that is in ffmpeg yet).
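Roughly, a shared table could look something like this (a sketch only; the
names are illustrative, not the actual symbols from mpv or ffmpeg_dxva2.c):

    /* Sketch: one codec -> decoder-GUID table usable by both APIs, since
     * DXVA2 and D3D11VA use the same decoder GUIDs. Only enumerating what
     * the device actually supports is API-specific. */
    typedef struct hwaccel_mode {
        enum AVCodecID codec_id;
        const GUID    *guid;
    } hwaccel_mode;

    static const hwaccel_mode hwaccel_modes[] = {
        { AV_CODEC_ID_MPEG2VIDEO, &DXVA2_ModeMPEG2_VLD     },
        { AV_CODEC_ID_H264,       &DXVA2_ModeH264_E        },
        { AV_CODEC_ID_H264,       &DXVA2_ModeH264_F        },
        { AV_CODEC_ID_HEVC,       &DXVA2_ModeHEVC_VLD_Main },
        { AV_CODEC_ID_NONE,       NULL                     },
    };

The matching loop over such a table is identical for dxva2 and d3d11va; only
the "does the device support this GUID" query differs.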
> >> diff --git a/libavutil/Makefile b/libavutil/Makefile
> >> index 9841645..9b8ce22 100644
> >> --- a/libavutil/Makefile
> >> +++ b/libavutil/Makefile
> >> @@ -34,6 +34,7 @@ HEADERS = adler32.h \
> >> hmac.h \
> >> hwcontext.h \
> >> hwcontext_cuda.h \
> >> + hwcontext_d3d11va.h \
> >> hwcontext_dxva2.h \
> >> hwcontext_qsv.h \
> >> hwcontext_vaapi.h \
> >> @@ -156,6 +157,7 @@ OBJS = adler32.o \
> >> OBJS-$(!HAVE_ATOMICS_NATIVE) += atomic.o \
> >>
> >> OBJS-$(CONFIG_CUDA) += hwcontext_cuda.o
> >> +OBJS-$(CONFIG_D3D11VA) += hwcontext_d3d11va.o
> >> OBJS-$(CONFIG_DXVA2) += hwcontext_dxva2.o
> >> OBJS-$(CONFIG_QSV) += hwcontext_qsv.o
> >> OBJS-$(CONFIG_LZO) += lzo.o
> >> @@ -170,6 +172,7 @@ SLIBOBJS-$(HAVE_GNU_WINDRES) += avutilres.o
> >>
> >> SKIPHEADERS-$(HAVE_CUDA_H) += hwcontext_cuda.h
> >> SKIPHEADERS-$(CONFIG_CUDA) += hwcontext_cuda_internal.h
> >> +SKIPHEADERS-$(CONFIG_D3D11VA) += hwcontext_d3d11va.h
> >> SKIPHEADERS-$(CONFIG_DXVA2) += hwcontext_dxva2.h
> >> SKIPHEADERS-$(CONFIG_QSV) += hwcontext_qsv.h
> >> SKIPHEADERS-$(CONFIG_VAAPI) += hwcontext_vaapi.h
> >> diff --git a/libavutil/hwcontext.c b/libavutil/hwcontext.c
> >> index 615f1f7..a9db84b 100644
> >> --- a/libavutil/hwcontext.c
> >> +++ b/libavutil/hwcontext.c
> >> @@ -32,6 +32,9 @@ static const HWContextType *hw_table[] = {
> >> #if CONFIG_CUDA
> >> &ff_hwcontext_type_cuda,
> >> #endif
> >> +#if CONFIG_D3D11VA
> >> + &ff_hwcontext_type_d3d11va,
> >> +#endif
> >> #if CONFIG_DXVA2
> >> &ff_hwcontext_type_dxva2,
> >> #endif
> >> diff --git a/libavutil/hwcontext.h b/libavutil/hwcontext.h
> >> index 785da09..e29dc67 100644
> >> --- a/libavutil/hwcontext.h
> >> +++ b/libavutil/hwcontext.h
> >> @@ -28,6 +28,7 @@ enum AVHWDeviceType {
> >> AV_HWDEVICE_TYPE_VDPAU,
> >> AV_HWDEVICE_TYPE_CUDA,
> >> AV_HWDEVICE_TYPE_VAAPI,
> >> + AV_HWDEVICE_TYPE_D3D11VA,
> >> AV_HWDEVICE_TYPE_DXVA2,
> >> AV_HWDEVICE_TYPE_QSV,
> >> };
> >> diff --git a/libavutil/hwcontext_d3d11va.c b/libavutil/hwcontext_d3d11va.c
> >> new file mode 100644
> >> index 0000000..6ac1019
> >> --- /dev/null
> >> +++ b/libavutil/hwcontext_d3d11va.c
> >> @@ -0,0 +1,436 @@
> >> +/*
> >> + * This file is part of FFmpeg.
> >> + *
> >> + * FFmpeg is free software; you can redistribute it and/or
> >> + * modify it under the terms of the GNU Lesser General Public
> >> + * License as published by the Free Software Foundation; either
> >> + * version 2.1 of the License, or (at your option) any later version.
> >> + *
> >> + * FFmpeg is distributed in the hope that it will be useful,
> >> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> >> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> >> + * Lesser General Public License for more details.
> >> + *
> >> + * You should have received a copy of the GNU Lesser General Public
> >> + * License along with FFmpeg; if not, write to the Free Software
> >> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> >> + */
> >> +
> >> +#include <windows.h>
> >> +
> >> +#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600
> >> +#undef _WIN32_WINNT
> >> +#define _WIN32_WINNT 0x0600
> >> +#endif
> >> +#define COBJMACROS
> >> +
> >> +#include <initguid.h>
> >> +#include <d3d11.h>
> >> +#include <dxgi1_2.h>
> >> +
> >> +#include "avassert.h"
> >> +#include "common.h"
> >> +#include "hwcontext.h"
> >> +#include "hwcontext_d3d11va.h"
> >> +#include "hwcontext_internal.h"
> >> +#include "imgutils.h"
> >> +#include "pixdesc.h"
> >> +#include "pixfmt.h"
> >> +#include "compat/w32dlfcn.h"
> >> +
> >> +typedef HRESULT(WINAPI *PFN_CREATE_DXGI_FACTORY)(REFIID riid, void **ppFactory);
> >> +
> >> +typedef struct D3D11VAFramesContext {
> >> + ID3D11VideoDecoderOutputView **surfaces_internal;
> >> + int nb_surfaces_used;
> >> +
> >> + ID3D11DeviceContext *d3d11_context;
> >> +
> >> + DXGI_FORMAT format;
> >> +} D3D11VAFramesContext;
> >> +
> >> +typedef struct D3D11VADevicePriv {
> >> + HMODULE d3dlib;
> >> +} D3D11VADevicePriv;
> >> +
> >> +static const struct {
> >> + DXGI_FORMAT d3d_format;
> >> + enum AVPixelFormat pix_fmt;
> >> +} supported_formats[] = {
> >> + { DXGI_FORMAT_NV12, AV_PIX_FMT_NV12 },
> >> + { DXGI_FORMAT_P010, AV_PIX_FMT_P010 },
> >> +};
> >> +
> >> +static void d3d11va_frames_uninit(AVHWFramesContext *ctx)
> >> +{
> >> + AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> >> + D3D11VAFramesContext *s = ctx->internal->priv;
> >> +
> >> + if (frames_hwctx->decoder_to_release)
> >> + ID3D11VideoDecoder_Release(frames_hwctx->decoder_to_release);
> >> +
> >> + av_freep(&s->surfaces_internal);
> >> +
> >> + if (frames_hwctx->staging_texture)
> >> + ID3D11Texture2D_Release(frames_hwctx->staging_texture);
> >> +
> >> + if (s->d3d11_context) {
> >> + ID3D11DeviceContext_Release(s->d3d11_context);
> >> + s->d3d11_context = NULL;
> >> + }
> >> +}
> >> +
> >> +static void free_surface(void *opaque, uint8_t *data)
> >> +{
> >> + ID3D11VideoDecoderOutputView_Release((ID3D11VideoDecoderOutputView*)data);
> >> +}
> >> +
> >> +static AVBufferRef *d3d11va_pool_alloc(void *opaque, int size)
> >> +{
> >> + AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
> >> + D3D11VAFramesContext *s = ctx->internal->priv;
> >> + AVD3D11VAFramesContext *hwctx = ctx->hwctx;
> >> +
> >> + if (s->nb_surfaces_used < hwctx->nb_surfaces) {
> >> + s->nb_surfaces_used++;
> >> + return av_buffer_create((uint8_t*)s->surfaces_internal[s->nb_surfaces_used - 1],
> >> + sizeof(*hwctx->surfaces), free_surface, 0, 0);
> >> + }
> >> +
> >> + return NULL;
> >> +}
> >> +
> >> +static int d3d11va_init_pool(AVHWFramesContext *ctx)
> >> +{
> >> + AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> >> + AVD3D11VADeviceContext *device_hwctx = ctx->device_ctx->hwctx;
> >> + D3D11VAFramesContext *s = ctx->internal->priv;
> >> +
> >> + int i;
> >> + HRESULT hr;
> >> + D3D11_TEXTURE2D_DESC texDesc;
> >> + ID3D11Texture2D *p_texture;
> >> + D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC viewDesc;
> >> + D3D11_VIDEO_DECODER_DESC decoderDesc;
> >> + D3D11_VIDEO_DECODER_CONFIG decoderConfig;
> >> +
> >> + if (ctx->initial_pool_size <= 0)
> >> + return 0;
> >> +
> >> + hr = ID3D11VideoContext_QueryInterface(device_hwctx->video_context, &IID_ID3D11DeviceContext, (void **)&s->d3d11_context);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Failed to get the device context %lx\n", hr);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
> >> + if (ctx->sw_format == supported_formats[i].pix_fmt) {
> >> + s->format = supported_formats[i].d3d_format;
> >> + break;
> >> + }
> >> + }
> >> + if (i == FF_ARRAY_ELEMS(supported_formats)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
> >> + av_get_pix_fmt_name(ctx->sw_format));
> >> + return AVERROR(EINVAL);
> >> + }
> >> +
> >> + s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
> >> + sizeof(*s->surfaces_internal));
> >> + if (!s->surfaces_internal)
> >> + return AVERROR(ENOMEM);
> >> +
> >> + ZeroMemory(&texDesc, sizeof(texDesc));
> >> + texDesc.Width = ctx->width;
> >> + texDesc.Height = ctx->height;
> >> + texDesc.MipLevels = 1;
> >> + texDesc.Format = s->format;
> >> + texDesc.SampleDesc.Count = 1;
> >> + texDesc.ArraySize = ctx->initial_pool_size;
> >> + texDesc.Usage = D3D11_USAGE_DEFAULT;
> >> + texDesc.BindFlags = D3D11_BIND_DECODER;
> >> +
> >> + ID3D11Device *d3d11_device;
> >> + hr = ID3D11VideoDevice_QueryInterface(device_hwctx->video_device, &IID_ID3D11Device, (void **)&d3d11_device);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Failed to get the device %lx\n", hr);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + hr = ID3D11Device_CreateTexture2D(d3d11_device, &texDesc, NULL, &p_texture);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Could not create the texture %lx\n", hr);
> >> + ID3D11Device_Release(d3d11_device);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + hr = ID3D11VideoDecoder_GetCreationParameters(frames_hwctx->decoder_to_release, &decoderDesc, &decoderConfig);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Could not get the decoder config %lx\n", hr);
> >> + ID3D11Texture2D_Release(p_texture);
> >> + ID3D11Device_Release(d3d11_device);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + ZeroMemory(&viewDesc, sizeof(viewDesc));
> >> + viewDesc.DecodeProfile = decoderDesc.Guid;
> >> + viewDesc.ViewDimension = D3D11_VDOV_DIMENSION_TEXTURE2D;
> >> + for (i=0; i<ctx->initial_pool_size; i++)
> >> + {
> >> + hr = ID3D11VideoDevice_CreateVideoDecoderOutputView(device_hwctx->video_device,
> >> + (ID3D11Resource*) p_texture,
> >> + &viewDesc,
> >> + (ID3D11VideoDecoderOutputView**) &s->surfaces_internal[i]);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Could not create the decoder output %d\n", i);
> >> + while (--i >= 0) {
> >> + ID3D11VideoDecoderOutputView_Release(s->surfaces_internal[i]);
> >> + s->surfaces_internal[i] = NULL;
> >> + }
> >> + ID3D11Texture2D_Release(p_texture);
> >> + ID3D11Device_Release(d3d11_device);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> + }
> >> + ID3D11Texture2D_Release(p_texture);
> >> +
> >> + texDesc.ArraySize = 1;
> >> + texDesc.Usage = D3D11_USAGE_STAGING;
> >> + texDesc.BindFlags = 0;
> >> + texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
> >
> > Doesn't this exclude upload?
>
> I only took decoding into consideration. Since it's a staging texture,
> adding ACCESS_WRITE is possible. When mapping the surface for CPU access
> I'll need to adjust the flags.
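For reference, a rough sketch of what that could look like (untested): the
staging texture gets both access flags, and the Map() call picks the direction.

    /* staging texture usable for both download and upload */
    texDesc.Usage          = D3D11_USAGE_STAGING;
    texDesc.BindFlags      = 0;
    texDesc.CPUAccessFlags = D3D11_CPU_ACCESS_READ | D3D11_CPU_ACCESS_WRITE;

    /* in the transfer function, map read or write depending on direction */
    hr = ID3D11DeviceContext_Map(s->d3d11_context,
                                 (ID3D11Resource *)frames_hwctx->staging_texture, 0,
                                 download ? D3D11_MAP_READ : D3D11_MAP_WRITE,
                                 0, &LockedRect);
    /* for upload, the staging texture then still has to be copied back into
     * the decoder texture with CopySubresourceRegion() after Unmap() */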
>
> >> + hr = ID3D11Device_CreateTexture2D(d3d11_device, &texDesc, NULL, &frames_hwctx->staging_texture);
> >> + ID3D11Device_Release(d3d11_device);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Could not create the staging texture %lx\n", hr);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(*s->surfaces_internal),
> >> + ctx, d3d11va_pool_alloc, NULL);
> >> + if (!ctx->internal->pool_internal)
> >> + return AVERROR(ENOMEM);
> >> +
> >> + frames_hwctx->surfaces = s->surfaces_internal;
> >> + frames_hwctx->nb_surfaces = ctx->initial_pool_size;
> >> +
> >> + return 0;
> >> +}
> >> +
> >> +static int d3d11va_frames_init(AVHWFramesContext *ctx)
> >> +{
> >> + int ret;
> >> +
> >> + /* init the frame pool if the caller didn't provide one */
> >> + if (!ctx->pool) {
> >> + ret = d3d11va_init_pool(ctx);
> >> + if (ret < 0) {
> >> + av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
> >> + return ret;
> >> + }
> >> + }
> >> +
> >> + return 0;
> >> +}
> >> +
> >> +static int d3d11va_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
> >> +{
> >> + frame->buf[0] = av_buffer_pool_get(ctx->pool);
> >> + if (!frame->buf[0])
> >> + return AVERROR(ENOMEM);
> >> +
> >> + frame->data[3] = frame->buf[0]->data;
> >> + frame->format = AV_PIX_FMT_D3D11VA_VLD;
> >> + frame->width = ctx->width;
> >> + frame->height = ctx->height;
> >> +
> >> + return 0;
> >> +}
> >> +
> >> +static int d3d11va_transfer_get_formats(AVHWFramesContext *ctx,
> >> + enum AVHWFrameTransferDirection dir,
> >> + enum AVPixelFormat **formats)
> >> +{
> >> + enum AVPixelFormat *fmts;
> >> +
> >> + fmts = av_malloc_array(2, sizeof(*fmts));
> >> + if (!fmts)
> >> + return AVERROR(ENOMEM);
> >> +
> >> + fmts[0] = ctx->sw_format;
> >> + fmts[1] = AV_PIX_FMT_NONE;
> >> +
> >> + *formats = fmts;
> >> +
> >> + return 0;
> >> +}
> >> +
> >> +static int d3d11va_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
> >> + const AVFrame *src)
> >> +{
> >> + ID3D11VideoDecoderOutputView *surface;
> >> + D3D11_VIDEO_DECODER_OUTPUT_VIEW_DESC surfaceDesc;
> >> + D3D11_TEXTURE2D_DESC dstDesc;
> >> + D3D11_MAPPED_SUBRESOURCE LockedRect;
> >> + ID3D11Resource *pTexture;
> >> + HRESULT hr;
> >> + AVD3D11VAFramesContext *frames_hwctx = ctx->hwctx;
> >> + D3D11VAFramesContext *s = ctx->internal->priv;
> >> +
> >> + uint8_t *surf_data[4] = { NULL };
> >> + int surf_linesize[4] = { 0 };
> >> + int i;
> >> +
> >> + int download = !!src->hw_frames_ctx;
> >> +
> >> + surface = (ID3D11VideoDecoderOutputView*)(download ? src->data[3] : dst->data[3]);
> >> +
> >> + ID3D11VideoDecoderOutputView_GetDesc(surface, &surfaceDesc);
> >> + ID3D11VideoDecoderOutputView_GetResource(surface, &pTexture);
> >> +
> >> + ID3D11DeviceContext_CopySubresourceRegion(s->d3d11_context, (ID3D11Resource*)frames_hwctx->staging_texture,
> >> + 0, 0, 0, 0,
> >> + (ID3D11Resource*)pTexture, surfaceDesc.Texture2D.ArraySlice,
> >> + NULL);
> >> + ID3D11Resource_Release(pTexture);
> >> +
> >> + hr = ID3D11DeviceContext_Map(s->d3d11_context, (ID3D11Resource*)frames_hwctx->staging_texture,
> >> + 0, D3D11_MAP_READ, 0, &LockedRect);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Unable to lock D3D11VA surface %lx\n", hr);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + for (i = 0; download ? dst->data[i] : src->data[i]; i++)
> >> + surf_linesize[i] = LockedRect.RowPitch;
> >> +
> >> + ID3D11Texture2D_GetDesc(frames_hwctx->staging_texture, &dstDesc);
> >> + av_image_fill_pointers(surf_data, ctx->sw_format, dstDesc.Height,
> >> + (uint8_t*)LockedRect.pData, surf_linesize);
> >> +
> >> + if (download) {
> >> + av_image_copy(dst->data, dst->linesize, surf_data, surf_linesize,
> >> + ctx->sw_format, src->width, src->height);
> >> + } else {
> >> + av_image_copy(surf_data, surf_linesize, src->data, src->linesize,
> >> + ctx->sw_format, src->width, src->height);
> >> + }
> >> +
> >> + ID3D11DeviceContext_Unmap(s->d3d11_context, (ID3D11Resource*)frames_hwctx->staging_texture, 0);
> >> +
> >> + return 0;
> >> +}
> >> +
> >> +static void d3d11va_device_free(AVHWDeviceContext *ctx)
> >> +{
> >> + AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
> >> + D3D11VADevicePriv *priv = ctx->user_opaque;
> >> +
> >> + if (device_hwctx->video_device)
> >> + ID3D11Device_Release(device_hwctx->video_device);
> >> +
> >> + if (device_hwctx->video_context)
> >> + ID3D11VideoContext_Release(device_hwctx->video_context);
> >> +
> >> + if (priv->d3dlib)
> >> + dlclose(priv->d3dlib);
> >> +
> >> + av_freep(&ctx->user_opaque);
> >> +}
> >> +
> >> +static int d3d11va_device_create(AVHWDeviceContext *ctx, const char *device,
> >> + AVDictionary *opts, int flags)
> >> +{
> >> + AVD3D11VADeviceContext *device_hwctx = ctx->hwctx;
> >> + D3D11VADevicePriv *priv;
> >> +
> >> + HRESULT hr;
> >> + PFN_D3D11_CREATE_DEVICE createD3D;
> >> + IDXGIAdapter *pAdapter = NULL;
> >> + UINT creationFlags = D3D11_CREATE_DEVICE_VIDEO_SUPPORT;
> >> + /* if the DirectX SDK is installed creationFlags |= D3D11_CREATE_DEVICE_DEBUG; */
> >> +
> >> + if (device) {
> >> + HMODULE dxgilib = dlopen("dxgi.dll", 0);
> >
> > Using dlopen instead of LoadLibrary is just odd.
>
> I used exactly what is used in other parts of the DXVA2 code. If it
> exists I suppose there's a reason for that.
Huh? libavutil/hwcontext_dxva2.c uses LoadLibrary.
I mean, using dlopen() works, but I just can't see any reason for it,
and it obfuscates the code somewhat.
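The plain Win32 form is hardly longer, something like (untested sketch):

    HMODULE d3dlib = LoadLibraryA("d3d11.dll");
    if (!d3dlib)
        return AVERROR_UNKNOWN;
    createD3D = (PFN_D3D11_CREATE_DEVICE)GetProcAddress(d3dlib, "D3D11CreateDevice");
    /* and FreeLibrary(d3dlib) in the device free callback */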
> >> + if (dxgilib) {
> >> + PFN_CREATE_DXGI_FACTORY mCreateDXGIFactory;
> >> + mCreateDXGIFactory = (PFN_CREATE_DXGI_FACTORY)dlsym(dxgilib, "CreateDXGIFactory");
> >> + if (mCreateDXGIFactory) {
> >> + IDXGIFactory2 *pDXGIFactory;
> >> + hr = mCreateDXGIFactory(&IID_IDXGIFactory2, (void **)&pDXGIFactory);
> >> + if (SUCCEEDED(hr)) {
> >> + int adapter = atoi(device);
> >
> > I know the dxva2 code uses this too, but please no lazy skipping of
> > error checking. What does atoi even return if the string is not a
> > number.
>
> It returns 0 and so we'll use the first adapter.
>
Quoting C99:
> The functions atof, atoi, atol, and atoll need not affect the
> value of the integer expression errno on an error. If the
> value of the result cannot be represented, the behavior is
> undefined.
Doesn't sound very promising.
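Something like this would at least reject garbage instead of silently using
adapter 0 (sketch only; needs <limits.h>, and error handling would have to be
fitted into the surrounding cleanup):

    char *end;
    long adapter = strtol(device, &end, 10);
    if (end == device || *end || adapter < 0 || adapter > INT_MAX) {
        av_log(ctx, AV_LOG_ERROR, "Invalid adapter index '%s'\n", device);
        /* fail instead of falling through to adapter 0 */
        return AVERROR(EINVAL);
    }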
> >> + if (FAILED(IDXGIFactory2_EnumAdapters(pDXGIFactory, adapter, &pAdapter)))
> >> + pAdapter = NULL;
> >> + IDXGIFactory2_Release(pDXGIFactory);
> >> + }
> >> + }
> >> + dlclose(dxgilib);
> >> + }
> >> + }
> >> +
> >> + priv = av_mallocz(sizeof(*priv));
> >> + if (!priv)
> >> + return AVERROR(ENOMEM);
> >> +
> >> + ctx->user_opaque = priv;
> >> + ctx->free = d3d11va_device_free;
> >> +
> >> + priv->d3dlib = dlopen("d3d11.dll", 0);
> >> + if (!priv->d3dlib) {
> >> + av_log(ctx, AV_LOG_ERROR, "Failed to load D3D11 library\n");
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + createD3D = (PFN_D3D11_CREATE_DEVICE)dlsym(priv->d3dlib, "D3D11CreateDevice");
> >> + if (!createD3D) {
> >> + av_log(ctx, AV_LOG_ERROR, "Failed to locate D3D11CreateDevice\n");
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + ID3D11Device *d3d11_device;
> >> + ID3D11DeviceContext *d3d11_context;
> >> + hr = createD3D(pAdapter, pAdapter ? D3D_DRIVER_TYPE_UNKNOWN : D3D_DRIVER_TYPE_HARDWARE, NULL, creationFlags, NULL, 0,
> >> + D3D11_SDK_VERSION, &d3d11_device, NULL, &d3d11_context);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Failed to create Direct3D device %lx\n", hr);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >
> > No call to ID3D10Multithread_SetMultithreadProtected()?
>
> No, it worked without it, even with multiple threads.
Not sure if that really means it's correct. We all know that even
disregarding the thread-safety of the D3D device, thread-safety of the
dxva2 code is shaky at best.
Anyway, this might also be needed in other contexts (although most API
users who need this will most likely provide their own device). If
you don't want to enable it unconditionally for whatever reason,
maybe add a flag?
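For reference, the call itself is small (sketch; the interface comes from the
d3d10 headers):

    ID3D10Multithread *mt;
    hr = ID3D11Device_QueryInterface(d3d11_device, &IID_ID3D10Multithread, (void **)&mt);
    if (SUCCEEDED(hr)) {
        /* make the device/context safe to use from multiple threads */
        ID3D10Multithread_SetMultithreadProtected(mt, TRUE);
        ID3D10Multithread_Release(mt);
    }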
> >> +
> >> + hr = ID3D11DeviceContext_QueryInterface(d3d11_context, &IID_ID3D11VideoContext,
> >> + (void **)&device_hwctx->video_context);
> >> + ID3D11DeviceContext_Release(d3d11_context);
> >> + if (FAILED(hr)) {
> >> + av_log(ctx, AV_LOG_ERROR, "Failed to get the Video Context %lx\n", hr);
> >> + ID3D11Device_Release(d3d11_device);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + hr = ID3D11Device_QueryInterface(d3d11_device, &IID_ID3D11VideoDevice,
> >> + (void **)&device_hwctx->video_device);
> >> + ID3D11Device_Release(d3d11_device);
> >> + if (FAILED(hr)) {
> >> + av_log(NULL, AV_LOG_ERROR, "Failed to get the Video Device %lx\n", hr);
> >> + return AVERROR_UNKNOWN;
> >> + }
> >> +
> >> + return 0;
> >> +}
> >> +
> >> +const HWContextType ff_hwcontext_type_d3d11va = {
> >> + .type = AV_HWDEVICE_TYPE_D3D11VA,
> >> + .name = "D3D11VA",
> >> +
> >> + .device_hwctx_size = sizeof(AVD3D11VADeviceContext),
> >> + .frames_hwctx_size = sizeof(AVD3D11VAFramesContext),
> >> + .frames_priv_size = sizeof(D3D11VAFramesContext),
> >> +
> >> + .device_create = d3d11va_device_create,
> >> + .frames_init = d3d11va_frames_init,
> >> + .frames_uninit = d3d11va_frames_uninit,
> >> + .frames_get_buffer = d3d11va_get_buffer,
> >> + .transfer_get_formats = d3d11va_transfer_get_formats,
> >> + .transfer_data_to = d3d11va_transfer_data,
> >> + .transfer_data_from = d3d11va_transfer_data,
> >> +
> >> + .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_D3D11VA_VLD, AV_PIX_FMT_NONE },
> >> +};
> >> diff --git a/libavutil/hwcontext_d3d11va.h b/libavutil/hwcontext_d3d11va.h
> >> new file mode 100644
> >> index 0000000..c3e4437
> >> --- /dev/null
> >> +++ b/libavutil/hwcontext_d3d11va.h
> >> @@ -0,0 +1,74 @@
> >> +/*
> >> + * This file is part of FFmpeg.
> >> + *
> >> + * FFmpeg is free software; you can redistribute it and/or
> >> + * modify it under the terms of the GNU Lesser General Public
> >> + * License as published by the Free Software Foundation; either
> >> + * version 2.1 of the License, or (at your option) any later version.
> >> + *
> >> + * FFmpeg is distributed in the hope that it will be useful,
> >> + * but WITHOUT ANY WARRANTY; without even the implied warranty of
> >> + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
> >> + * Lesser General Public License for more details.
> >> + *
> >> + * You should have received a copy of the GNU Lesser General Public
> >> + * License along with FFmpeg; if not, write to the Free Software
> >> + * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
> >> + */
> >> +
> >> +
> >> +#ifndef AVUTIL_HWCONTEXT_D3D11VA_H
> >> +#define AVUTIL_HWCONTEXT_D3D11VA_H
> >> +
> >> +/**
> >> + * @file
> >> + * An API-specific header for AV_HWDEVICE_TYPE_D3D11VA.
> >> + *
> >> + * Only fixed-size pools are supported.
> >> + *
> >> + * For user-allocated pools, AVHWFramesContext.pool must return AVBufferRefs
> >> + * with the data pointer set to a pointer to ID3D11VideoDecoderOutputView.
> >> + */
> >> +
> >> +#include <d3d11.h>
> >> +
> >> +/**
> >> + * This struct is allocated as AVHWDeviceContext.hwctx
> >> + */
> >> +typedef struct AVD3D11VADeviceContext {
> >> + ID3D11VideoDevice *video_device;
> >> + ID3D11VideoContext *video_context;
> >> +} AVD3D11VADeviceContext;
> >> +
> >> +/**
> >> + * This struct is allocated as AVHWFramesContext.hwctx
> >> + */
> >> +typedef struct AVD3D11VAFramesContext {
> >> + /**
> >> + * The surface pool. When an external pool is not provided by the caller,
> >> + * this will be managed (allocated and filled on init, freed on uninit) by
> >> + * libavutil.
> >
> > Doesn't say whether it also deallocates this stuff if the user allocates
> > it?
>
> I'll update the documentation. It was copy/pasted...
OK. The doxygen on the decoder_to_release field below is much more
explicit, for example.
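E.g. something along these lines (just a wording suggestion, assuming the
intended semantics are the same as for dxva2):

    /**
     * The surface pool. When an external pool is not provided by the caller,
     * this will be managed (allocated and filled on init, freed on uninit) by
     * libavutil. If the caller provides the pool, the caller keeps ownership
     * of the views and libavutil will not release them.
     */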
> >> + */
> >> + ID3D11VideoDecoderOutputView **surfaces;
> >> + int nb_surfaces;
> >> +
> >> + /**
> >> + * Certain drivers require the decoder to be destroyed before the surfaces.
> >> + * To allow internally managed pools to work properly in such cases, this
> >> + * field is provided.
> >> + *
> >> + * The decoder must be created before the surface pool.
> >> + *
> >> + * If it is non-NULL, libavutil will call ID3D11VideoDecoder_Release() on
> >> + * it just before the internal surface pool is freed.
> >> + */
> >> + ID3D11VideoDecoder *decoder_to_release;
> >> +
> >> + /**
> >> + * Internal texture to get access to the decoded pixels from the CPU as the
> >> + * decoder texture needs D3D11_BIND_DECODER which forbids CPU access.
> >> + */
> >> + ID3D11Texture2D *staging_texture;
> >> +} AVD3D11VAFramesContext;
> >> +
> >> +#endif /* AVUTIL_HWCONTEXT_D3D11VA_H */
> >> diff --git a/libavutil/hwcontext_internal.h b/libavutil/hwcontext_internal.h
> >> index 079e42b..aaa6286 100644
> >> --- a/libavutil/hwcontext_internal.h
> >> +++ b/libavutil/hwcontext_internal.h
> >> @@ -100,6 +100,7 @@ struct AVHWFramesInternal {
> >> };
> >>
> >> extern const HWContextType ff_hwcontext_type_cuda;
> >> +extern const HWContextType ff_hwcontext_type_d3d11va;
> >> extern const HWContextType ff_hwcontext_type_dxva2;
> >> extern const HWContextType ff_hwcontext_type_qsv;
> >> extern const HWContextType ff_hwcontext_type_vaapi;
> >> diff --git a/libavutil/version.h b/libavutil/version.h
> >> index 9f8c4c2..6dac240 100644
> >> --- a/libavutil/version.h
> >> +++ b/libavutil/version.h
> >> @@ -80,7 +80,7 @@
> >>
> >> #define LIBAVUTIL_VERSION_MAJOR 55
> >> #define LIBAVUTIL_VERSION_MINOR 43
> >> -#define LIBAVUTIL_VERSION_MICRO 100
> >> +#define LIBAVUTIL_VERSION_MICRO 101
> >>
> >> #define LIBAVUTIL_VERSION_INT AV_VERSION_INT(LIBAVUTIL_VERSION_MAJOR, \
> >> LIBAVUTIL_VERSION_MINOR, \