qsv: align surface width/height to 16.
[libav.git] / libavutil / hwcontext_qsv.c
/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <stdint.h>
#include <string.h>

#include <mfx/mfxvideo.h>

#include "config.h"

#if CONFIG_VAAPI
#include "hwcontext_vaapi.h"
#endif
#if CONFIG_DXVA2
#include "hwcontext_dxva2.h"
#endif

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_qsv.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"
#include "time.h"

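/*
 * Illustrative usage sketch (not a guarantee of this file's API surface):
 * QSV device contexts are normally created through the generic hwcontext
 * helpers, e.g.
 *
 *     AVBufferRef *device_ref = NULL;
 *     int ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_QSV,
 *                                      "hw", NULL, 0);
 *
 * which lands in qsv_device_create() below; the "hw" string is translated
 * to an mfxIMPL value by choose_implementation().
 */
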
typedef struct QSVDevicePriv {
    AVBufferRef *child_device_ctx;
} QSVDevicePriv;

typedef struct QSVDeviceContext {
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;

    enum AVHWDeviceType child_device_type;
    enum AVPixelFormat child_pix_fmt;
} QSVDeviceContext;

typedef struct QSVFramesContext {
    mfxSession session_download;
    mfxSession session_upload;

    AVBufferRef *child_frames_ref;
    mfxFrameSurface1 *surfaces_internal;
    int nb_surfaces_used;

    // used in the frame allocator for non-opaque surfaces
    mfxMemId *mem_ids;
    // used in the opaque alloc request for opaque surfaces
    mfxFrameSurface1 **surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];
} QSVFramesContext;

static const struct {
    mfxHandleType handle_type;
    enum AVHWDeviceType device_type;
    enum AVPixelFormat pix_fmt;
} supported_handle_types[] = {
#if CONFIG_VAAPI
    { MFX_HANDLE_VA_DISPLAY, AV_HWDEVICE_TYPE_VAAPI, AV_PIX_FMT_VAAPI },
#endif
#if CONFIG_DXVA2
    { MFX_HANDLE_D3D9_DEVICE_MANAGER, AV_HWDEVICE_TYPE_DXVA2, AV_PIX_FMT_DXVA2_VLD },
#endif
    { 0 },
};

static const struct {
    enum AVPixelFormat pix_fmt;
    uint32_t fourcc;
} supported_pixel_formats[] = {
    { AV_PIX_FMT_NV12, MFX_FOURCC_NV12 },
    { AV_PIX_FMT_P010, MFX_FOURCC_P010 },
    { AV_PIX_FMT_PAL8, MFX_FOURCC_P8 },
};

static uint32_t qsv_fourcc_from_pix_fmt(enum AVPixelFormat pix_fmt)
{
    int i;
    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++) {
        if (supported_pixel_formats[i].pix_fmt == pix_fmt)
            return supported_pixel_formats[i].fourcc;
    }
    return 0;
}

static int qsv_device_init(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDeviceContext *s = ctx->internal->priv;

    mfxStatus err;
    int i;

    for (i = 0; supported_handle_types[i].handle_type; i++) {
        err = MFXVideoCORE_GetHandle(hwctx->session, supported_handle_types[i].handle_type,
                                     &s->handle);
        if (err == MFX_ERR_NONE) {
            s->handle_type = supported_handle_types[i].handle_type;
            s->child_device_type = supported_handle_types[i].device_type;
            s->child_pix_fmt = supported_handle_types[i].pix_fmt;
            break;
        }
    }
    if (!s->handle) {
        av_log(ctx, AV_LOG_VERBOSE, "No supported hw handle could be retrieved "
               "from the session\n");
    }

    err = MFXQueryIMPL(hwctx->session, &s->impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(hwctx->session, &s->ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static void qsv_frames_uninit(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;

    if (s->session_download) {
        MFXVideoVPP_Close(s->session_download);
        MFXClose(s->session_download);
    }
    s->session_download = NULL;

    if (s->session_upload) {
        MFXVideoVPP_Close(s->session_upload);
        MFXClose(s->session_upload);
    }
    s->session_upload = NULL;

    av_freep(&s->mem_ids);
    av_freep(&s->surface_ptrs);
    av_freep(&s->surfaces_internal);
    av_buffer_unref(&s->child_frames_ref);
}

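/* Buffers handed out by the internal pool are thin wrappers around entries of
 * the preallocated surfaces_internal array, so releasing them is a no-op. */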
static void qsv_pool_release_dummy(void *opaque, uint8_t *data)
{
}

static AVBufferRef *qsv_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext *ctx = (AVHWFramesContext*)opaque;
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;

    if (s->nb_surfaces_used < hwctx->nb_surfaces) {
        s->nb_surfaces_used++;
        return av_buffer_create((uint8_t*)(s->surfaces_internal + s->nb_surfaces_used - 1),
                                sizeof(*hwctx->surfaces), qsv_pool_release_dummy, NULL, 0);
    }

    return NULL;
}

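/* For non-opaque surfaces the actual memory is owned by a "child" frames
 * context on the underlying API (VAAPI or DXVA2); each mfxFrameSurface1 in
 * surfaces_internal gets its MemId from the corresponding child surface. */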
static int qsv_init_child_ctx(AVHWFramesContext *ctx)
{
    AVQSVFramesContext *hwctx = ctx->hwctx;
    QSVFramesContext *s = ctx->internal->priv;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;

    AVBufferRef *child_device_ref = NULL;
    AVBufferRef *child_frames_ref = NULL;

    AVHWDeviceContext *child_device_ctx;
    AVHWFramesContext *child_frames_ctx;

    int i, ret = 0;

    if (!device_priv->handle) {
        av_log(ctx, AV_LOG_ERROR,
               "Cannot create a non-opaque internal surface pool without "
               "a hardware handle\n");
        return AVERROR(EINVAL);
    }

    child_device_ref = av_hwdevice_ctx_alloc(device_priv->child_device_type);
    if (!child_device_ref)
        return AVERROR(ENOMEM);
    child_device_ctx = (AVHWDeviceContext*)child_device_ref->data;

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->display = (VADisplay)device_priv->handle;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
        child_device_hwctx->devmgr = (IDirect3DDeviceManager9*)device_priv->handle;
    }
#endif

    ret = av_hwdevice_ctx_init(child_device_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child device context\n");
        goto fail;
    }

    child_frames_ref = av_hwframe_ctx_alloc(child_device_ref);
    if (!child_frames_ref) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }
    child_frames_ctx = (AVHWFramesContext*)child_frames_ref->data;

    child_frames_ctx->format = device_priv->child_pix_fmt;
    child_frames_ctx->sw_format = ctx->sw_format;
    child_frames_ctx->initial_pool_size = ctx->initial_pool_size;
    child_frames_ctx->width = FFALIGN(ctx->width, 16);
    child_frames_ctx->height = FFALIGN(ctx->height, 16);

#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        if (hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET)
            child_frames_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        else
            child_frames_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
    }
#endif

    ret = av_hwframe_ctx_init(child_frames_ref);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a child frames context\n");
        goto fail;
    }

#if CONFIG_VAAPI
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_VAAPI) {
        AVVAAPIFramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = child_frames_hwctx->surface_ids + i;
        hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif
#if CONFIG_DXVA2
    if (child_device_ctx->type == AV_HWDEVICE_TYPE_DXVA2) {
        AVDXVA2FramesContext *child_frames_hwctx = child_frames_ctx->hwctx;
        for (i = 0; i < ctx->initial_pool_size; i++)
            s->surfaces_internal[i].Data.MemId = (mfxMemId)child_frames_hwctx->surfaces[i];
        if (child_frames_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
        else
            hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
    }
#endif

    s->child_frames_ref = child_frames_ref;
    child_frames_ref = NULL;

fail:
    av_buffer_unref(&child_device_ref);
    av_buffer_unref(&child_frames_ref);
    return ret;
}

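/* Fill an mfxFrameInfo from the frames context's sw_format and dimensions;
 * width and height are aligned to 16, with the unaligned size kept in
 * CropW/CropH. */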
static int qsv_init_surface(AVHWFramesContext *ctx, mfxFrameSurface1 *surf)
{
    const AVPixFmtDescriptor *desc;
    uint32_t fourcc;

    desc = av_pix_fmt_desc_get(ctx->sw_format);
    if (!desc)
        return AVERROR(EINVAL);

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc)
        return AVERROR(EINVAL);

    surf->Info.BitDepthLuma = desc->comp[0].depth;
    surf->Info.BitDepthChroma = desc->comp[0].depth;
    surf->Info.Shift = desc->comp[0].depth > 8;

    if (desc->log2_chroma_w && desc->log2_chroma_h)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
    else if (desc->log2_chroma_w)
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
    else
        surf->Info.ChromaFormat = MFX_CHROMAFORMAT_YUV444;

    surf->Info.FourCC = fourcc;
    surf->Info.Width = FFALIGN(ctx->width, 16);
    surf->Info.CropW = ctx->width;
    surf->Info.Height = FFALIGN(ctx->height, 16);
    surf->Info.CropH = ctx->height;
    surf->Info.FrameRateExtN = 25;
    surf->Info.FrameRateExtD = 1;
    surf->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

    return 0;
}

static int qsv_init_pool(AVHWFramesContext *ctx, uint32_t fourcc)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int i, ret = 0;

    if (ctx->initial_pool_size <= 0) {
        av_log(ctx, AV_LOG_ERROR, "QSV requires a fixed frame pool size\n");
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    for (i = 0; i < ctx->initial_pool_size; i++) {
        ret = qsv_init_surface(ctx, &s->surfaces_internal[i]);
        if (ret < 0)
            return ret;
    }

    if (!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)) {
        ret = qsv_init_child_ctx(ctx);
        if (ret < 0)
            return ret;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(mfxFrameSurface1),
                                                        ctx, qsv_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}

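/* mfxFrameAllocator callbacks used for the non-opaque case: allocation simply
 * hands back the fixed set of mem_ids created in qsv_frames_init(), and
 * locking/unlocking surfaces is not supported. */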
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVHWFramesContext *ctx = pthis;
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *hwctx = ctx->hwctx;
    mfxFrameInfo *i = &req->Info;
    mfxFrameInfo *i1 = &hwctx->surfaces[0].Info;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;
    if (i->Width != i1->Width || i->Height != i1->Height ||
        i->FourCC != i1->FourCC || i->ChromaFormat != i1->ChromaFormat) {
        av_log(ctx, AV_LOG_ERROR, "Mismatching surface properties in an "
               "allocation request: %dx%d %d %d vs %dx%d %d %d\n",
               i->Width, i->Height, i->FourCC, i->ChromaFormat,
               i1->Width, i1->Height, i1->FourCC, i1->ChromaFormat);
        return MFX_ERR_UNSUPPORTED;
    }

    resp->mids = s->mem_ids;
    resp->NumFrameActual = hwctx->nb_surfaces;

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

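/* Create an internal MFX session with a VPP component, used purely for
 * copying frame data between system memory and video (or opaque) memory in
 * qsv_transfer_data_to()/qsv_transfer_data_from(). */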
static int qsv_init_internal_session(AVHWFramesContext *ctx,
                                     mfxSession *session, int upload)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;
    QSVDeviceContext *device_priv = ctx->device_ctx->internal->priv;
    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxFrameAllocator frame_allocator = {
        .pthis = ctx,
        .Alloc = frame_alloc,
        .Lock = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free = frame_free,
    };

    mfxVideoParam par;
    mfxStatus err;

    err = MFXInit(device_priv->impl, &device_priv->ver, session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an internal session\n");
        return AVERROR_UNKNOWN;
    }

    if (device_priv->handle) {
        err = MFXVideoCORE_SetHandle(*session, device_priv->handle_type,
                                     device_priv->handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    if (!opaque) {
        err = MFXVideoCORE_SetFrameAllocator(*session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        par.ExtParam = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_OPAQUE_MEMORY :
                                 MFX_IOPATTERN_IN_OPAQUE_MEMORY;
    } else {
        par.IOPattern = upload ? MFX_IOPATTERN_OUT_VIDEO_MEMORY :
                                 MFX_IOPATTERN_IN_VIDEO_MEMORY;
    }

    par.IOPattern |= upload ? MFX_IOPATTERN_IN_SYSTEM_MEMORY :
                              MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    par.AsyncDepth = 1;

    par.vpp.In = frames_hwctx->surfaces[0].Info;

    /* Apparently VPP requires the frame rate to be set to some value, otherwise
     * init will fail (probably for the framerate conversion filter). Since we
     * are only doing data upload/download here, we just invent an arbitrary
     * value */
    par.vpp.In.FrameRateExtN = 25;
    par.vpp.In.FrameRateExtD = 1;
    par.vpp.Out = par.vpp.In;

    err = MFXVideoVPP_Init(*session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_VERBOSE, "Error opening the internal VPP session. "
               "Surface upload/download will not be possible\n");
        MFXClose(*session);
        *session = NULL;
    }

    return 0;
}

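/*
 * Illustrative sketch of how a caller typically reaches qsv_frames_init();
 * the concrete sizes below are arbitrary example values:
 *
 *     AVBufferRef *frames_ref = av_hwframe_ctx_alloc(device_ref);
 *     AVHWFramesContext *fc = (AVHWFramesContext*)frames_ref->data;
 *     fc->format            = AV_PIX_FMT_QSV;
 *     fc->sw_format         = AV_PIX_FMT_NV12;
 *     fc->width             = 1920;
 *     fc->height            = 1080;
 *     fc->initial_pool_size = 32;   // a fixed pool size is required
 *     int ret = av_hwframe_ctx_init(frames_ref);
 */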
static int qsv_frames_init(AVHWFramesContext *ctx)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVQSVFramesContext *frames_hwctx = ctx->hwctx;

    int opaque = !!(frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    uint32_t fourcc;
    int i, ret;

    fourcc = qsv_fourcc_from_pix_fmt(ctx->sw_format);
    if (!fourcc) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format\n");
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ret = qsv_init_pool(ctx, fourcc);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    if (opaque) {
        s->surface_ptrs = av_mallocz_array(frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = frames_hwctx->surfaces + i;

        s->opaque_alloc.In.Surfaces = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = frames_hwctx->nb_surfaces;
        s->opaque_alloc.In.Type = frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;
    } else {
        s->mem_ids = av_mallocz_array(frames_hwctx->nb_surfaces, sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);

        for (i = 0; i < frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = frames_hwctx->surfaces[i].Data.MemId;
    }

    ret = qsv_init_internal_session(ctx, &s->session_download, 0);
    if (ret < 0)
        return ret;

    ret = qsv_init_internal_session(ctx, &s->session_upload, 1);
    if (ret < 0)
        return ret;

    return 0;
}

static int qsv_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format = AV_PIX_FMT_QSV;
    frame->width = ctx->width;
    frame->height = ctx->height;

    return 0;
}

static int qsv_transfer_get_formats(AVHWFramesContext *ctx,
                                    enum AVHWFrameTransferDirection dir,
                                    enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}

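/* Expose an existing QSV frames context as a VAAPI or DXVA2 frames context by
 * extracting the child surface handles stored in each surface's MemId. */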
static int qsv_frames_derive_from(AVHWFramesContext *dst_ctx,
                                  AVHWFramesContext *src_ctx, int flags)
{
    AVQSVFramesContext *src_hwctx = src_ctx->hwctx;
    int i;

    switch (dst_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surface_ids = av_mallocz_array(src_hwctx->nb_surfaces,
                                                      sizeof(*dst_hwctx->surface_ids));
            if (!dst_hwctx->surface_ids)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surface_ids[i] =
                    *(VASurfaceID*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *dst_hwctx = dst_ctx->hwctx;
            dst_hwctx->surfaces = av_mallocz_array(src_hwctx->nb_surfaces,
                                                   sizeof(*dst_hwctx->surfaces));
            if (!dst_hwctx->surfaces)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++)
                dst_hwctx->surfaces[i] =
                    (IDirect3DSurface9*)src_hwctx->surfaces[i].Data.MemId;
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->frame_type == MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                dst_hwctx->surface_type = DXVA2_VideoDecoderRenderTarget;
            else
                dst_hwctx->surface_type = DXVA2_VideoProcessorRenderTarget;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    return 0;
}

static int qsv_map_from(AVHWFramesContext *ctx,
                        AVFrame *dst, const AVFrame *src, int flags)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)src->data[3];
    AVHWFramesContext *child_frames_ctx;
    const AVPixFmtDescriptor *desc;
    uint8_t *child_data;
    AVFrame *dummy;
    int ret = 0;

    if (!s->child_frames_ref)
        return AVERROR(ENOSYS);
    child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;

    switch (child_frames_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        child_data = (uint8_t*)(intptr_t)*(VASurfaceID*)surf->Data.MemId;
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        child_data = surf->Data.MemId;
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    if (dst->format == child_frames_ctx->format) {
        ret = ff_hwframe_map_create(s->child_frames_ref,
                                    dst, src, NULL, NULL);
        if (ret < 0)
            return ret;

        dst->width = src->width;
        dst->height = src->height;
        dst->data[3] = child_data;

        return 0;
    }

    desc = av_pix_fmt_desc_get(dst->format);
    if (desc && desc->flags & AV_PIX_FMT_FLAG_HWACCEL) {
        // This only supports mapping to software.
        return AVERROR(ENOSYS);
    }

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->buf[0] = av_buffer_ref(src->buf[0]);
    dummy->hw_frames_ctx = av_buffer_ref(s->child_frames_ref);
    if (!dummy->buf[0] || !dummy->hw_frames_ctx)
        goto fail;

    dummy->format = child_frames_ctx->format;
    dummy->width = src->width;
    dummy->height = src->height;
    dummy->data[3] = child_data;

    ret = av_hwframe_map(dst, dummy, flags);

fail:
    av_frame_free(&dummy);

    return ret;
}

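/* Fallback upload/download path used when no internal VPP session could be
 * created: wrap the surface's child handle in a temporary AVFrame and let the
 * child frames context (VAAPI/DXVA2) do the actual transfer. */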
static int qsv_transfer_data_child(AVHWFramesContext *ctx, AVFrame *dst,
                                   const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    AVHWFramesContext *child_frames_ctx = (AVHWFramesContext*)s->child_frames_ref->data;
    int download = !!src->hw_frames_ctx;
    mfxFrameSurface1 *surf = (mfxFrameSurface1*)(download ? src->data[3] : dst->data[3]);

    AVFrame *dummy;
    int ret;

    dummy = av_frame_alloc();
    if (!dummy)
        return AVERROR(ENOMEM);

    dummy->format = child_frames_ctx->format;
    dummy->width = src->width;
    dummy->height = src->height;
    dummy->buf[0] = download ? src->buf[0] : dst->buf[0];
    dummy->data[3] = surf->Data.MemId;
    dummy->hw_frames_ctx = s->child_frames_ref;

    ret = download ? av_hwframe_transfer_data(dst, dummy, 0) :
                     av_hwframe_transfer_data(dummy, src, 0);

    dummy->buf[0] = NULL;
    dummy->data[3] = NULL;
    dummy->hw_frames_ctx = NULL;

    av_frame_free(&dummy);

    return ret;
}

static int qsv_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 out = {{ 0 }};
    mfxFrameSurface1 *in = (mfxFrameSurface1*)src->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;

    if (!s->session_download) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src);

        av_log(ctx, AV_LOG_ERROR, "Surface download not possible\n");
        return AVERROR(ENOSYS);
    }

    out.Info = in->Info;
    out.Data.PitchLow = dst->linesize[0];
    out.Data.Y = dst->data[0];
    out.Data.U = dst->data[1];
    out.Data.V = dst->data[2];
    out.Data.A = dst->data[3];

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_download, in, &out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error downloading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_download, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int qsv_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                const AVFrame *src)
{
    QSVFramesContext *s = ctx->internal->priv;
    mfxFrameSurface1 in = {{ 0 }};
    mfxFrameSurface1 *out = (mfxFrameSurface1*)dst->data[3];

    mfxSyncPoint sync = NULL;
    mfxStatus err;

    if (!s->session_upload) {
        if (s->child_frames_ref)
            return qsv_transfer_data_child(ctx, dst, src);

        av_log(ctx, AV_LOG_ERROR, "Surface upload not possible\n");
        return AVERROR(ENOSYS);
    }

    in.Info = out->Info;
    in.Data.PitchLow = src->linesize[0];
    in.Data.Y = src->data[0];
    in.Data.U = src->data[1];
    in.Data.V = src->data[2];
    in.Data.A = src->data[3];

    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session_upload, &in, out, NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err < 0 || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the surface\n");
        return AVERROR_UNKNOWN;
    }

    do {
        err = MFXVideoCORE_SyncOperation(s->session_upload, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int qsv_frames_derive_to(AVHWFramesContext *dst_ctx,
                                AVHWFramesContext *src_ctx, int flags)
{
    QSVFramesContext *s = dst_ctx->internal->priv;
    AVQSVFramesContext *dst_hwctx = dst_ctx->hwctx;
    int i;

    switch (src_ctx->device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIFramesContext *src_hwctx = src_ctx->hwctx;
            s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
                                                    sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->surfaces_internal[i].Data.MemId = src_hwctx->surface_ids + i;
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2FramesContext *src_hwctx = src_ctx->hwctx;
            s->surfaces_internal = av_mallocz_array(src_hwctx->nb_surfaces,
                                                    sizeof(*s->surfaces_internal));
            if (!s->surfaces_internal)
                return AVERROR(ENOMEM);
            for (i = 0; i < src_hwctx->nb_surfaces; i++) {
                qsv_init_surface(dst_ctx, &s->surfaces_internal[i]);
                s->surfaces_internal[i].Data.MemId = (mfxMemId)src_hwctx->surfaces[i];
            }
            dst_hwctx->nb_surfaces = src_hwctx->nb_surfaces;
            if (src_hwctx->surface_type == DXVA2_VideoProcessorRenderTarget)
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET;
            else
                dst_hwctx->frame_type = MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;
        }
        break;
#endif
    default:
        return AVERROR(ENOSYS);
    }

    dst_hwctx->surfaces = s->surfaces_internal;

    return 0;
}

static int qsv_map_to(AVHWFramesContext *dst_ctx,
                      AVFrame *dst, const AVFrame *src, int flags)
{
    AVQSVFramesContext *hwctx = dst_ctx->hwctx;
    int i, err;

    for (i = 0; i < hwctx->nb_surfaces; i++) {
#if CONFIG_VAAPI
        if (*(VASurfaceID*)hwctx->surfaces[i].Data.MemId ==
            (VASurfaceID)(uintptr_t)src->data[3])
            break;
#endif
#if CONFIG_DXVA2
        if ((IDirect3DSurface9*)hwctx->surfaces[i].Data.MemId ==
            (IDirect3DSurface9*)(uintptr_t)src->data[3])
            break;
#endif
    }
    if (i >= hwctx->nb_surfaces) {
        av_log(dst_ctx, AV_LOG_ERROR, "Trying to map from a surface which "
               "is not in the mapped frames context.\n");
        return AVERROR(EINVAL);
    }

    err = ff_hwframe_map_create(dst->hw_frames_ctx,
                                dst, src, NULL, NULL);
    if (err)
        return err;

    dst->width = src->width;
    dst->height = src->height;
    dst->data[3] = (uint8_t*)&hwctx->surfaces[i];

    return 0;
}

static int qsv_frames_get_constraints(AVHWDeviceContext *ctx,
                                      const void *hwconfig,
                                      AVHWFramesConstraints *constraints)
{
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(supported_pixel_formats) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(supported_pixel_formats); i++)
        constraints->valid_sw_formats[i] = supported_pixel_formats[i].pix_fmt;
    constraints->valid_sw_formats[FF_ARRAY_ELEMS(supported_pixel_formats)] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_QSV;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

static void qsv_device_free(AVHWDeviceContext *ctx)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;
    QSVDevicePriv *priv = ctx->user_opaque;

    if (hwctx->session)
        MFXClose(hwctx->session);

    av_buffer_unref(&priv->child_device_ctx);
    av_freep(&priv);
}

static mfxIMPL choose_implementation(const char *device)
{
    static const struct {
        const char *name;
        mfxIMPL impl;
    } impl_map[] = {
        { "auto", MFX_IMPL_AUTO },
        { "sw", MFX_IMPL_SOFTWARE },
        { "hw", MFX_IMPL_HARDWARE },
        { "auto_any", MFX_IMPL_AUTO_ANY },
        { "hw_any", MFX_IMPL_HARDWARE_ANY },
        { "hw2", MFX_IMPL_HARDWARE2 },
        { "hw3", MFX_IMPL_HARDWARE3 },
        { "hw4", MFX_IMPL_HARDWARE4 },
    };

    mfxIMPL impl = MFX_IMPL_AUTO_ANY;
    int i;

    if (device) {
        for (i = 0; i < FF_ARRAY_ELEMS(impl_map); i++)
            if (!strcmp(device, impl_map[i].name)) {
                impl = impl_map[i].impl;
                break;
            }
        if (i == FF_ARRAY_ELEMS(impl_map))
            impl = strtol(device, NULL, 0);
    }

    return impl;
}

static int qsv_device_derive_from_child(AVHWDeviceContext *ctx,
                                        mfxIMPL implementation,
                                        AVHWDeviceContext *child_device_ctx,
                                        int flags)
{
    AVQSVDeviceContext *hwctx = ctx->hwctx;

    mfxVersion ver = { { 3, 1 } };
    mfxHDL handle;
    mfxHandleType handle_type;
    mfxStatus err;
    int ret;

    switch (child_device_ctx->type) {
#if CONFIG_VAAPI
    case AV_HWDEVICE_TYPE_VAAPI:
        {
            AVVAAPIDeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_VA_DISPLAY;
            handle = (mfxHDL)child_device_hwctx->display;
        }
        break;
#endif
#if CONFIG_DXVA2
    case AV_HWDEVICE_TYPE_DXVA2:
        {
            AVDXVA2DeviceContext *child_device_hwctx = child_device_ctx->hwctx;
            handle_type = MFX_HANDLE_D3D9_DEVICE_MANAGER;
            handle = (mfxHDL)child_device_hwctx->devmgr;
        }
        break;
#endif
    default:
        ret = AVERROR(ENOSYS);
        goto fail;
    }

    err = MFXInit(implementation, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing an MFX session: "
               "%d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    err = MFXQueryVersion(hwctx->session, &ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying an MFX session: %d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    av_log(ctx, AV_LOG_VERBOSE,
           "Initialize MFX session: API version is %d.%d, implementation version is %d.%d\n",
           MFX_VERSION_MAJOR, MFX_VERSION_MINOR, ver.Major, ver.Minor);

    MFXClose(hwctx->session);

    err = MFXInit(implementation, &ver, &hwctx->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Error initializing an MFX session: %d.\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    err = MFXVideoCORE_SetHandle(hwctx->session, handle_type, handle);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error setting child device handle: "
               "%d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    return 0;

fail:
    if (hwctx->session)
        MFXClose(hwctx->session);
    return ret;
}

static int qsv_device_derive(AVHWDeviceContext *ctx,
                             AVHWDeviceContext *child_device_ctx, int flags)
{
    return qsv_device_derive_from_child(ctx, MFX_IMPL_HARDWARE_ANY,
                                        child_device_ctx, flags);
}

static int qsv_device_create(AVHWDeviceContext *ctx, const char *device,
                             AVDictionary *opts, int flags)
{
    QSVDevicePriv *priv;
    enum AVHWDeviceType child_device_type;
    AVHWDeviceContext *child_device;
    AVDictionaryEntry *e;

    mfxIMPL impl;
    int ret;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free = qsv_device_free;

    e = av_dict_get(opts, "child_device", NULL, 0);

    if (CONFIG_VAAPI)
        child_device_type = AV_HWDEVICE_TYPE_VAAPI;
    else if (CONFIG_DXVA2)
        child_device_type = AV_HWDEVICE_TYPE_DXVA2;
    else {
        av_log(ctx, AV_LOG_ERROR, "No supported child device type is enabled\n");
        return AVERROR(ENOSYS);
    }

    ret = av_hwdevice_ctx_create(&priv->child_device_ctx, child_device_type,
                                 e ? e->value : NULL, NULL, 0);
    if (ret < 0)
        return ret;

    child_device = (AVHWDeviceContext*)priv->child_device_ctx->data;

    impl = choose_implementation(device);

    return qsv_device_derive_from_child(ctx, impl, child_device, 0);
}

const HWContextType ff_hwcontext_type_qsv = {
    .type = AV_HWDEVICE_TYPE_QSV,
    .name = "QSV",

    .device_hwctx_size = sizeof(AVQSVDeviceContext),
    .device_priv_size = sizeof(QSVDeviceContext),
    .frames_hwctx_size = sizeof(AVQSVFramesContext),
    .frames_priv_size = sizeof(QSVFramesContext),

    .device_create = qsv_device_create,
    .device_derive = qsv_device_derive,
    .device_init = qsv_device_init,
    .frames_get_constraints = qsv_frames_get_constraints,
    .frames_init = qsv_frames_init,
    .frames_uninit = qsv_frames_uninit,
    .frames_get_buffer = qsv_get_buffer,
    .transfer_get_formats = qsv_transfer_get_formats,
    .transfer_data_to = qsv_transfer_data_to,
    .transfer_data_from = qsv_transfer_data_from,
    .map_to = qsv_map_to,
    .map_from = qsv_map_from,
    .frames_derive_to = qsv_frames_derive_to,
    .frames_derive_from = qsv_frames_derive_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_QSV, AV_PIX_FMT_NONE },
};