/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * Intel Quick Sync Video VPP base functions
 */

#include "libavutil/common.h"
#include "libavutil/mathematics.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/time.h"
#include "libavutil/pixdesc.h"

#include "internal.h"
#include "qsvvpp.h"
#include "video.h"

#define IS_VIDEO_MEMORY(mode)  (mode & (MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET | \
                                        MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET))
#define IS_OPAQUE_MEMORY(mode) (mode & MFX_MEMTYPE_OPAQUE_FRAME)
#define IS_SYSTEM_MEMORY(mode) (mode & MFX_MEMTYPE_SYSTEM_MEMORY)

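/* pool entry pairing an AVFrame with the mfxFrameSurface1 handed to libmfx */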
typedef struct QSVFrame {
    AVFrame          *frame;
    mfxFrameSurface1 *surface;
    mfxFrameSurface1  surface_internal;   /* for system memory */
    struct QSVFrame  *next;
} QSVFrame;

/* abstract struct for all QSV filters */
struct QSVVPPContext {
    mfxSession          session;
    int (*filter_frame)(AVFilterLink *outlink, AVFrame *frame); /* callback */
    enum AVPixelFormat  out_sw_format;    /* real output format */
    mfxVideoParam       vpp_param;
    mfxFrameInfo       *frame_infos;      /* frame info for each input */

    /* members related to the input/output surfaces */
    int                 in_mem_mode;
    int                 out_mem_mode;
    QSVFrame           *in_frame_list;
    QSVFrame           *out_frame_list;
    int                 nb_surface_ptrs_in;
    int                 nb_surface_ptrs_out;
    mfxFrameSurface1  **surface_ptrs_in;
    mfxFrameSurface1  **surface_ptrs_out;

    /* MFXVPP extension parameters (ext buffers) */
    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer      **ext_buffers;
    int                 nb_ext_buffers;
};

static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

static const AVRational default_tb = { 1, 90000 };

/* mfxFrameAllocator callbacks: nothing is allocated here, the MemIds of the
 * surfaces already created by the AVHWFramesContext are handed back to libmfx */
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    QSVVPPContext *s = pthis;
    int i;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    if (req->Type & MFX_MEMTYPE_FROM_VPPIN) {
        resp->mids = av_mallocz(s->nb_surface_ptrs_in * sizeof(*resp->mids));
        if (!resp->mids)
            return MFX_ERR_MEMORY_ALLOC;

        for (i = 0; i < s->nb_surface_ptrs_in; i++)
            resp->mids[i] = s->surface_ptrs_in[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_in;
    } else {
        resp->mids = av_mallocz(s->nb_surface_ptrs_out * sizeof(*resp->mids));
        if (!resp->mids)
            return MFX_ERR_MEMORY_ALLOC;

        for (i = 0; i < s->nb_surface_ptrs_out; i++)
            resp->mids[i] = s->surface_ptrs_out[i]->Data.MemId;

        resp->NumFrameActual = s->nb_surface_ptrs_out;
    }

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    av_freep(&resp->mids);
    return MFX_ERR_NONE;
}

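/* Locking is left unimplemented: only video-memory MemIds are exposed through
 * this allocator, and VPP is not expected to map them into system memory */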
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

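/* translate an AVPixelFormat into the corresponding libmfx FourCC;
 * unknown formats fall back to NV12 */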
static int pix_fmt_to_mfx_fourcc(int format)
{
    switch (format) {
    case AV_PIX_FMT_YUV420P:
        return MFX_FOURCC_YV12;
    case AV_PIX_FMT_NV12:
        return MFX_FOURCC_NV12;
    case AV_PIX_FMT_YUYV422:
        return MFX_FOURCC_YUY2;
    case AV_PIX_FMT_RGB32:
        return MFX_FOURCC_RGB4;
    }

    return MFX_FOURCC_NV12;
}

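/* point a system-memory mfxFrameSurface1 at the planes of an AVFrame;
 * no pixel data is copied */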
static int map_frame_to_surface(AVFrame *frame, mfxFrameSurface1 *surface)
{
    switch (frame->format) {
    case AV_PIX_FMT_NV12:
        surface->Data.Y  = frame->data[0];
        surface->Data.UV = frame->data[1];
        break;
    case AV_PIX_FMT_YUV420P:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[1];
        surface->Data.V = frame->data[2];
        break;
    case AV_PIX_FMT_YUYV422:
        surface->Data.Y = frame->data[0];
        surface->Data.U = frame->data[0] + 1;
        surface->Data.V = frame->data[0] + 3;
        break;
    case AV_PIX_FMT_RGB32:
        surface->Data.B = frame->data[0];
        surface->Data.G = frame->data[0] + 1;
        surface->Data.R = frame->data[0] + 2;
        surface->Data.A = frame->data[0] + 3;
        break;
    default:
        return MFX_ERR_UNSUPPORTED;
    }
    surface->Data.Pitch = frame->linesize[0];

    return 0;
}

/* fill the surface info */
static int fill_frameinfo_by_link(mfxFrameInfo *frameinfo, AVFilterLink *link)
{
    enum AVPixelFormat        pix_fmt;
    AVHWFramesContext        *frames_ctx;
    AVQSVFramesContext       *frames_hwctx;
    const AVPixFmtDescriptor *desc;

    if (link->format == AV_PIX_FMT_QSV) {
        if (!link->hw_frames_ctx)
            return AVERROR(EINVAL);

        frames_ctx   = (AVHWFramesContext *)link->hw_frames_ctx->data;
        frames_hwctx = frames_ctx->hwctx;
        *frameinfo   = frames_hwctx->surfaces[0].Info;
    } else {
        pix_fmt = link->format;
        desc = av_pix_fmt_desc_get(pix_fmt);
        if (!desc)
            return AVERROR_BUG;

        frameinfo->CropX          = 0;
        frameinfo->CropY          = 0;
        frameinfo->Width          = FFALIGN(link->w, 32);
        frameinfo->Height         = FFALIGN(link->h, 32);
        frameinfo->PicStruct      = MFX_PICSTRUCT_PROGRESSIVE;
        frameinfo->FourCC         = pix_fmt_to_mfx_fourcc(pix_fmt);
        frameinfo->BitDepthLuma   = desc->comp[0].depth;
        frameinfo->BitDepthChroma = desc->comp[0].depth;
        frameinfo->Shift          = desc->comp[0].depth > 8;
        if (desc->log2_chroma_w && desc->log2_chroma_h)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV420;
        else if (desc->log2_chroma_w)
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV422;
        else
            frameinfo->ChromaFormat = MFX_CHROMAFORMAT_YUV444;
    }

    frameinfo->CropW         = link->w;
    frameinfo->CropH         = link->h;
    frameinfo->FrameRateExtN = link->frame_rate.num;
    frameinfo->FrameRateExtD = link->frame_rate.den;
    frameinfo->AspectRatioW  = link->sample_aspect_ratio.num ? link->sample_aspect_ratio.num : 1;
    frameinfo->AspectRatioH  = link->sample_aspect_ratio.den ? link->sample_aspect_ratio.den : 1;

    return 0;
}

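/* release list entries whose surfaces libmfx has finished with (Data.Locked == 0) */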
static void clear_unused_frames(QSVFrame *list)
{
    while (list) {
        if (list->surface && !list->surface->Data.Locked) {
            list->surface = NULL;
            av_frame_free(&list->frame);
        }
        list = list->next;
    }
}

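/* free every entry of a frame list */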
static void clear_frame_list(QSVFrame **list)
{
    while (*list) {
        QSVFrame *frame;

        frame = *list;
        *list = (*list)->next;
        av_frame_free(&frame->frame);
        av_freep(&frame);
    }
}

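/* return the first unused entry in the list, appending a new one if necessary */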
static QSVFrame *get_free_frame(QSVFrame **list)
{
    QSVFrame *out = *list;

    for (; out; out = out->next) {
        if (!out->surface)
            break;
    }

    if (!out) {
        out = av_mallocz(sizeof(*out));
        if (!out) {
            av_log(NULL, AV_LOG_ERROR, "Can't alloc new output frame.\n");
            return NULL;
        }
        out->next = *list;
        *list     = out;
    }

    return out;
}

/* get the input surface */
static QSVFrame *submit_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    QSVFrame        *qsv_frame;
    AVFilterContext *ctx = inlink->dst;

    clear_unused_frames(s->in_frame_list);

    qsv_frame = get_free_frame(&s->in_frame_list);
    if (!qsv_frame)
        return NULL;

    /* Turn the AVFrame into an mfxFrameSurface1.
     * In video/opaque memory mode, the pix_fmt is AV_PIX_FMT_QSV and the
     * mfxFrameSurface1 is stored in AVFrame->data[3];
     * in system memory mode, the raw video data is stored in the AVFrame
     * and has to be mapped into an mfxFrameSurface1.
     */
    if (!IS_SYSTEM_MEMORY(s->in_mem_mode)) {
        if (picref->format != AV_PIX_FMT_QSV) {
            av_log(ctx, AV_LOG_ERROR, "QSVVPP received a frame with an unexpected format.\n");
            return NULL;
        }
        qsv_frame->frame   = picref;
        qsv_frame->surface = (mfxFrameSurface1 *)qsv_frame->frame->data[3];
    } else {
        /* make a copy if the input is not padded as libmfx requires */
        if (picref->height & 31 || picref->linesize[0] & 31) {
            qsv_frame->frame = ff_get_video_buffer(inlink,
                                                   FFALIGN(inlink->w, 32),
                                                   FFALIGN(inlink->h, 32));
            if (!qsv_frame->frame)
                return NULL;

            qsv_frame->frame->width  = picref->width;
            qsv_frame->frame->height = picref->height;

            if (av_frame_copy(qsv_frame->frame, picref) < 0) {
                av_frame_free(&qsv_frame->frame);
                return NULL;
            }

            av_frame_copy_props(qsv_frame->frame, picref);
            av_frame_free(&picref);
        } else
            qsv_frame->frame = picref;

        if (map_frame_to_surface(qsv_frame->frame,
                                 &qsv_frame->surface_internal) < 0) {
            av_log(ctx, AV_LOG_ERROR, "Unsupported frame.\n");
            return NULL;
        }
        qsv_frame->surface = &qsv_frame->surface_internal;
    }

    qsv_frame->surface->Info           = s->frame_infos[FF_INLINK_IDX(inlink)];
    qsv_frame->surface->Data.TimeStamp = av_rescale_q(qsv_frame->frame->pts,
                                                      inlink->time_base, default_tb);

    qsv_frame->surface->Info.PicStruct =
            !qsv_frame->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
            (qsv_frame->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                 MFX_PICSTRUCT_FIELD_BFF);
    if (qsv_frame->frame->repeat_pict == 1)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qsv_frame->frame->repeat_pict == 2)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qsv_frame->frame->repeat_pict == 4)
        qsv_frame->surface->Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    return qsv_frame;
}

/* get the output surface */
static QSVFrame *query_frame(QSVVPPContext *s, AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVFrame        *out_frame;
    int              ret;

    clear_unused_frames(s->out_frame_list);

    out_frame = get_free_frame(&s->out_frame_list);
    if (!out_frame)
        return NULL;

    /* For video memory, get a hw frame;
     * for system memory, get a sw frame and map it into an mfx surface. */
    if (!IS_SYSTEM_MEMORY(s->out_mem_mode)) {
        out_frame->frame = av_frame_alloc();
        if (!out_frame->frame)
            return NULL;

        ret = av_hwframe_get_buffer(outlink->hw_frames_ctx, out_frame->frame, 0);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Can't allocate a surface.\n");
            return NULL;
        }

        out_frame->surface = (mfxFrameSurface1 *)out_frame->frame->data[3];
    } else {
        /* Get a frame with aligned dimensions:
         * libmfx requires system memory to be 128x64 aligned */
        out_frame->frame = ff_get_video_buffer(outlink,
                                               FFALIGN(outlink->w, 128),
                                               FFALIGN(outlink->h, 64));
        if (!out_frame->frame)
            return NULL;

        out_frame->frame->width  = outlink->w;
        out_frame->frame->height = outlink->h;

        ret = map_frame_to_surface(out_frame->frame,
                                   &out_frame->surface_internal);
        if (ret < 0)
            return NULL;

        out_frame->surface = &out_frame->surface_internal;
    }

    out_frame->surface->Info = s->vpp_param.vpp.Out;

    return out_frame;
}

/* create the QSV session and set up the input/output memory modes */
static int init_vpp_session(AVFilterContext *avctx, QSVVPPContext *s)
{
    AVFilterLink                 *inlink = avctx->inputs[0];
    AVFilterLink                *outlink = avctx->outputs[0];
    AVQSVFramesContext  *in_frames_hwctx = NULL;
    AVQSVFramesContext *out_frames_hwctx = NULL;

    AVBufferRef *device_ref;
    AVHWDeviceContext *device_ctx;
    AVQSVDeviceContext *device_hwctx;
    mfxHDL handle = NULL;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    int ret, i;

    if (inlink->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext *)inlink->hw_frames_ctx->data;

        device_ref      = frames_ctx->device_ref;
        in_frames_hwctx = frames_ctx->hwctx;

        s->in_mem_mode = in_frames_hwctx->frame_type;

        s->surface_ptrs_in = av_mallocz_array(in_frames_hwctx->nb_surfaces,
                                              sizeof(*s->surface_ptrs_in));
        if (!s->surface_ptrs_in)
            return AVERROR(ENOMEM);

        for (i = 0; i < in_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_in[i] = in_frames_hwctx->surfaces + i;

        s->nb_surface_ptrs_in = in_frames_hwctx->nb_surfaces;
    } else if (avctx->hw_device_ctx) {
        device_ref     = avctx->hw_device_ctx;
        s->in_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;
    } else {
        av_log(avctx, AV_LOG_ERROR, "No hw context provided.\n");
        return AVERROR(EINVAL);
    }

    device_ctx   = (AVHWDeviceContext *)device_ref->data;
    device_hwctx = device_ctx->hwctx;

    if (outlink->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *out_frames_ctx;
        AVBufferRef *out_frames_ref = av_hwframe_ctx_alloc(device_ref);
        if (!out_frames_ref)
            return AVERROR(ENOMEM);

        s->out_mem_mode = IS_OPAQUE_MEMORY(s->in_mem_mode) ?
                          MFX_MEMTYPE_OPAQUE_FRAME :
                          MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET;

        out_frames_ctx   = (AVHWFramesContext *)out_frames_ref->data;
        out_frames_hwctx = out_frames_ctx->hwctx;

        out_frames_ctx->format            = AV_PIX_FMT_QSV;
        out_frames_ctx->width             = FFALIGN(outlink->w, 32);
        out_frames_ctx->height            = FFALIGN(outlink->h, 32);
        out_frames_ctx->sw_format         = s->out_sw_format;
        out_frames_ctx->initial_pool_size = 64;
        out_frames_hwctx->frame_type      = s->out_mem_mode;

        ret = av_hwframe_ctx_init(out_frames_ref);
        if (ret < 0) {
            av_buffer_unref(&out_frames_ref);
            av_log(avctx, AV_LOG_ERROR, "Error creating frames_ctx for output pad.\n");
            return ret;
        }

        s->surface_ptrs_out = av_mallocz_array(out_frames_hwctx->nb_surfaces,
                                               sizeof(*s->surface_ptrs_out));
        if (!s->surface_ptrs_out) {
            av_buffer_unref(&out_frames_ref);
            return AVERROR(ENOMEM);
        }

        for (i = 0; i < out_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs_out[i] = out_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs_out = out_frames_hwctx->nb_surfaces;

        av_buffer_unref(&outlink->hw_frames_ctx);
        outlink->hw_frames_ctx = out_frames_ref;
    } else
        s->out_mem_mode = MFX_MEMTYPE_SYSTEM_MEMORY;

    /* extract the properties of the "master" session given to us */
    ret = MFXQueryIMPL(device_hwctx->session, &impl);
    if (ret == MFX_ERR_NONE)
        ret = MFXQueryVersion(device_hwctx->session, &ver);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

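    /* fetch the device handle (VA display / D3D device) from the parent session, if any */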
    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        ret = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (ret == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    /* create a "slave" session with those same properties, to be used for vpp */
    ret = MFXInit(impl, &ver, &s->session);
    if (ret != MFX_ERR_NONE) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing a session for VPP\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        ret = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }
    ret = MFXJoinSession(device_hwctx->session, s->session);
    if (ret != MFX_ERR_NONE)
        return AVERROR_UNKNOWN;

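    /* Opaque memory is described to libmfx through an ext buffer attached at
     * init time; for real video memory a frame allocator has to be installed
     * on the session instead. */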
    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->opaque_alloc.In.Surfaces   = s->surface_ptrs_in;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs_in;
        s->opaque_alloc.In.Type       = s->in_mem_mode;

        s->opaque_alloc.Out.Surfaces   = s->surface_ptrs_out;
        s->opaque_alloc.Out.NumSurface = s->nb_surface_ptrs_out;
        s->opaque_alloc.Out.Type       = s->out_mem_mode;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);
    } else if (IS_VIDEO_MEMORY(s->in_mem_mode) || IS_VIDEO_MEMORY(s->out_mem_mode)) {
        mfxFrameAllocator frame_allocator = {
            .pthis  = s,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        ret = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (ret != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    return 0;
}

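/* Create and initialize a QSVVPPContext on behalf of a QSV filter: set up the
 * VPP session, describe every input link and the output link, then initialize
 * VPP with MFXVideoVPP_Init().
 *
 * Rough usage sketch for a caller (illustrative only; "vpp" is whatever field
 * the filter keeps its QSVVPPContext in, error handling omitted):
 *
 *     QSVVPPParam param = { 0 };
 *     param.filter_frame  = NULL;               // fall back to ff_filter_frame()
 *     param.out_sw_format = AV_PIX_FMT_NV12;
 *     ret = ff_qsvvpp_create(ctx, &s->vpp, &param);          // at config time
 *     ret = ff_qsvvpp_filter_frame(s->vpp, inlink, frame);   // per input frame
 *     ff_qsvvpp_free(&s->vpp);                               // at uninit
 */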
int ff_qsvvpp_create(AVFilterContext *avctx, QSVVPPContext **vpp, QSVVPPParam *param)
{
    int i;
    int ret;
    QSVVPPContext *s;

    s = av_mallocz(sizeof(*s));
    if (!s)
        return AVERROR(ENOMEM);

    s->filter_frame  = param->filter_frame;
    if (!s->filter_frame)
        s->filter_frame = ff_filter_frame;
    s->out_sw_format = param->out_sw_format;

    /* create the vpp session */
    ret = init_vpp_session(avctx, s);
    if (ret < 0)
        goto failed;

    s->frame_infos = av_mallocz_array(avctx->nb_inputs, sizeof(*s->frame_infos));
    if (!s->frame_infos) {
        ret = AVERROR(ENOMEM);
        goto failed;
    }

    /* init each input's frame info */
    for (i = 0; i < avctx->nb_inputs; i++) {
        ret = fill_frameinfo_by_link(&s->frame_infos[i], avctx->inputs[i]);
        if (ret < 0)
            goto failed;
    }

    /* update each input's frame info according to the crop */
    for (i = 0; i < param->num_crop; i++) {
        QSVVPPCrop *crop = param->crop + i;
        if (crop->in_idx >= avctx->nb_inputs) {
            ret = AVERROR(EINVAL);
            goto failed;
        }
        s->frame_infos[crop->in_idx].CropX = crop->x;
        s->frame_infos[crop->in_idx].CropY = crop->y;
        s->frame_infos[crop->in_idx].CropW = crop->w;
        s->frame_infos[crop->in_idx].CropH = crop->h;
    }

    s->vpp_param.vpp.In = s->frame_infos[0];

    ret = fill_frameinfo_by_link(&s->vpp_param.vpp.Out, avctx->outputs[0]);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to get frame info from the output link.\n");
        goto failed;
    }

    if (IS_OPAQUE_MEMORY(s->in_mem_mode) || IS_OPAQUE_MEMORY(s->out_mem_mode)) {
        s->nb_ext_buffers = param->num_ext_buf + 1;
        s->ext_buffers = av_mallocz_array(s->nb_ext_buffers, sizeof(*s->ext_buffers));
        if (!s->ext_buffers) {
            ret = AVERROR(ENOMEM);
            goto failed;
        }

        s->ext_buffers[0] = (mfxExtBuffer *)&s->opaque_alloc;
        for (i = 1; i < s->nb_ext_buffers; i++)
            s->ext_buffers[i] = param->ext_buf[i - 1];
        s->vpp_param.ExtParam    = s->ext_buffers;
        s->vpp_param.NumExtParam = s->nb_ext_buffers;
    } else {
        s->vpp_param.NumExtParam = param->num_ext_buf;
        s->vpp_param.ExtParam    = param->ext_buf;
    }

    s->vpp_param.AsyncDepth = 1;

    if (IS_SYSTEM_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->in_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_IN_OPAQUE_MEMORY;

    if (IS_SYSTEM_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    else if (IS_VIDEO_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    else if (IS_OPAQUE_MEMORY(s->out_mem_mode))
        s->vpp_param.IOPattern |= MFX_IOPATTERN_OUT_OPAQUE_MEMORY;

    ret = MFXVideoVPP_Init(s->session, &s->vpp_param);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create a qsvvpp, ret = %d.\n", ret);
        goto failed;
    }

    *vpp = s;
    return 0;

failed:
    ff_qsvvpp_free(&s);

    return ret;
}

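/* close the VPP session and free all the frames, surface arrays and ext buffers
 * owned by the context */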
int ff_qsvvpp_free(QSVVPPContext **vpp)
{
    QSVVPPContext *s = *vpp;

    if (!s)
        return 0;

    if (s->session) {
        MFXVideoVPP_Close(s->session);
        MFXClose(s->session);
    }

    /* release all the resources */
    clear_frame_list(&s->in_frame_list);
    clear_frame_list(&s->out_frame_list);
    av_freep(&s->surface_ptrs_in);
    av_freep(&s->surface_ptrs_out);
    av_freep(&s->ext_buffers);
    av_freep(&s->frame_infos);
    av_freep(vpp);

    return 0;
}

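/* Run one input frame through VPP. A single input may yield several output
 * frames (MFX_ERR_MORE_SURFACE) or none at all (MFX_ERR_MORE_DATA, mapped to
 * EAGAIN); every completed frame is handed to the filter_frame callback. */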
int ff_qsvvpp_filter_frame(QSVVPPContext *s, AVFilterLink *inlink, AVFrame *picref)
{
    AVFilterContext *ctx     = inlink->dst;
    AVFilterLink    *outlink = ctx->outputs[0];
    mfxSyncPoint     sync;
    QSVFrame        *in_frame, *out_frame;
    int              ret, filter_ret;

    in_frame = submit_frame(s, inlink, picref);
    if (!in_frame) {
        av_log(ctx, AV_LOG_ERROR, "Failed to submit frame on input[%d]\n",
               FF_INLINK_IDX(inlink));
        return AVERROR(ENOMEM);
    }

    do {
        out_frame = query_frame(s, outlink);
        if (!out_frame) {
            av_log(ctx, AV_LOG_ERROR, "Failed to query an output frame.\n");
            return AVERROR(ENOMEM);
        }

        do {
            ret = MFXVideoVPP_RunFrameVPPAsync(s->session, in_frame->surface,
                                               out_frame->surface, NULL, &sync);
            if (ret == MFX_WRN_DEVICE_BUSY)
                av_usleep(500);
        } while (ret == MFX_WRN_DEVICE_BUSY);

        if (ret < 0 && ret != MFX_ERR_MORE_SURFACE) {
            /* MORE_DATA is not an error, the VPP simply needs more input */
            if (ret == MFX_ERR_MORE_DATA)
                ret = AVERROR(EAGAIN);
            break;
        }

        if (MFXVideoCORE_SyncOperation(s->session, sync, 1000) < 0)
            av_log(ctx, AV_LOG_WARNING, "Sync failed.\n");

        out_frame->frame->pts = av_rescale_q(out_frame->surface->Data.TimeStamp,
                                             default_tb, outlink->time_base);

        filter_ret = s->filter_frame(outlink, out_frame->frame);
        if (filter_ret < 0) {
            av_frame_free(&out_frame->frame);
            ret = filter_ret;
            break;
        }
        out_frame->frame = NULL;
    } while (ret == MFX_ERR_MORE_SURFACE);

    return ret;
}