lavfi: Make default get_video_buffer work with hardware frames
[libav.git] / libavfilter / vf_deinterlace_qsv.c
/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * deinterlace video filter - QSV
 */

#include <mfx/mfxvideo.h>

#include <stdio.h>
#include <string.h>

#include "libavutil/avstring.h"
#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_qsv.h"
#include "libavutil/internal.h"
#include "libavutil/mathematics.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"
#include "libavutil/time.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

enum {
    QSVDEINT_MORE_OUTPUT = 1,
    QSVDEINT_MORE_INPUT,
};

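/* A pool entry tying an input AVFrame to the local copy of its
 * mfxFrameSurface1 that is handed to libmfx; the entry is recycled once
 * libmfx no longer holds a lock on the surface. */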
typedef struct QSVFrame {
    AVFrame *frame;
    mfxFrameSurface1 surface;
    int used;

    struct QSVFrame *next;
} QSVFrame;

typedef struct QSVDeintContext {
    const AVClass *class;

    AVBufferRef *hw_frames_ctx;
    /* a clone of the main session, used internally for deinterlacing */
    mfxSession session;

    mfxMemId *mem_ids;
    int nb_mem_ids;

    mfxFrameSurface1 **surface_ptrs;
    int nb_surface_ptrs;

    mfxExtOpaqueSurfaceAlloc opaque_alloc;
    mfxExtBuffer *ext_buffers[1];

    QSVFrame *work_frames;

    int64_t last_pts;

    int got_output_frame;
    int eof;
} QSVDeintContext;

static void qsvdeint_uninit(AVFilterContext *ctx)
{
    QSVDeintContext *s = ctx->priv;
    QSVFrame *cur;

    if (s->session) {
        MFXClose(s->session);
        s->session = NULL;
    }
    av_buffer_unref(&s->hw_frames_ctx);

    cur = s->work_frames;
    while (cur) {
        s->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = s->work_frames;
    }

    av_freep(&s->mem_ids);
    s->nb_mem_ids = 0;

    av_freep(&s->surface_ptrs);
    s->nb_surface_ptrs = 0;
}
static int qsvdeint_query_formats(AVFilterContext *ctx)
{
    static const enum AVPixelFormat pixel_formats[] = {
        AV_PIX_FMT_QSV, AV_PIX_FMT_NONE,
    };
    AVFilterFormats *pix_fmts = ff_make_format_list(pixel_formats);
    if (!pix_fmts)
        return AVERROR(ENOMEM);

    ff_set_common_formats(ctx, pix_fmts);

    return 0;
}

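/* Frame allocator installed on the "slave" session for the video-memory
 * path. No surfaces are actually allocated here; the callbacks only hand
 * libmfx the MemIds of the surfaces the AVHWFramesContext already owns,
 * and mapping surfaces to system memory is unsupported by design. */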
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    AVFilterContext *ctx = pthis;
    QSVDeintContext *s = ctx->priv;

    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_PROCESSOR_TARGET) ||
        !(req->Type & (MFX_MEMTYPE_FROM_VPPIN | MFX_MEMTYPE_FROM_VPPOUT)) ||
        !(req->Type & MFX_MEMTYPE_EXTERNAL_FRAME))
        return MFX_ERR_UNSUPPORTED;

    resp->mids = s->mem_ids;
    resp->NumFrameActual = s->nb_mem_ids;

    return MFX_ERR_NONE;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    return MFX_ERR_NONE;
}

static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

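/* device handle types probed on the master session, so the same handle can
 * be installed on the slave session */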
static const mfxHandleType handle_types[] = {
    MFX_HANDLE_VA_DISPLAY,
    MFX_HANDLE_D3D9_DEVICE_MANAGER,
    MFX_HANDLE_D3D11_DEVICE,
};

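/* Spawn a second MFX session mirroring the implementation, API version and
 * device handle of the one in the device context, then open a VPP on it
 * for the actual deinterlacing. */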
static int init_out_session(AVFilterContext *ctx)
{
    QSVDeintContext *s = ctx->priv;
    AVHWFramesContext *hw_frames_ctx = (AVHWFramesContext*)s->hw_frames_ctx->data;
    AVQSVFramesContext *hw_frames_hwctx = hw_frames_ctx->hwctx;
    AVQSVDeviceContext *device_hwctx = hw_frames_ctx->device_ctx->hwctx;

    int opaque = !!(hw_frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME);

    mfxHDL handle = NULL;
    mfxHandleType handle_type;
    mfxVersion ver;
    mfxIMPL impl;
    mfxVideoParam par;
    mfxStatus err;
    int i;

    /* extract the properties of the "master" session given to us */
    err = MFXQueryIMPL(device_hwctx->session, &impl);
    if (err == MFX_ERR_NONE)
        err = MFXQueryVersion(device_hwctx->session, &ver);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the session attributes\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(handle_types); i++) {
        err = MFXVideoCORE_GetHandle(device_hwctx->session, handle_types[i], &handle);
        if (err == MFX_ERR_NONE) {
            handle_type = handle_types[i];
            break;
        }
    }

    /* create a "slave" session with those same properties, to be used for
     * actual deinterlacing */
    err = MFXInit(impl, &ver, &s->session);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error initializing a session for deinterlacing\n");
        return AVERROR_UNKNOWN;
    }

    if (handle) {
        err = MFXVideoCORE_SetHandle(s->session, handle_type, handle);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;
    }

    memset(&par, 0, sizeof(par));

    if (opaque) {
        s->surface_ptrs = av_mallocz_array(hw_frames_hwctx->nb_surfaces,
                                           sizeof(*s->surface_ptrs));
        if (!s->surface_ptrs)
            return AVERROR(ENOMEM);
        for (i = 0; i < hw_frames_hwctx->nb_surfaces; i++)
            s->surface_ptrs[i] = hw_frames_hwctx->surfaces + i;
        s->nb_surface_ptrs = hw_frames_hwctx->nb_surfaces;

        s->opaque_alloc.In.Surfaces   = s->surface_ptrs;
        s->opaque_alloc.In.NumSurface = s->nb_surface_ptrs;
        s->opaque_alloc.In.Type       = hw_frames_hwctx->frame_type;

        s->opaque_alloc.Out = s->opaque_alloc.In;

        s->opaque_alloc.Header.BufferId = MFX_EXTBUFF_OPAQUE_SURFACE_ALLOCATION;
        s->opaque_alloc.Header.BufferSz = sizeof(s->opaque_alloc);

        s->ext_buffers[0] = (mfxExtBuffer*)&s->opaque_alloc;

        par.ExtParam    = s->ext_buffers;
        par.NumExtParam = FF_ARRAY_ELEMS(s->ext_buffers);

        par.IOPattern = MFX_IOPATTERN_IN_OPAQUE_MEMORY | MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
    } else {
        mfxFrameAllocator frame_allocator = {
            .pthis  = ctx,
            .Alloc  = frame_alloc,
            .Lock   = frame_lock,
            .Unlock = frame_unlock,
            .GetHDL = frame_get_hdl,
            .Free   = frame_free,
        };

        s->mem_ids = av_mallocz_array(hw_frames_hwctx->nb_surfaces,
                                      sizeof(*s->mem_ids));
        if (!s->mem_ids)
            return AVERROR(ENOMEM);
        for (i = 0; i < hw_frames_hwctx->nb_surfaces; i++)
            s->mem_ids[i] = hw_frames_hwctx->surfaces[i].Data.MemId;
        s->nb_mem_ids = hw_frames_hwctx->nb_surfaces;

        err = MFXVideoCORE_SetFrameAllocator(s->session, &frame_allocator);
        if (err != MFX_ERR_NONE)
            return AVERROR_UNKNOWN;

        par.IOPattern = MFX_IOPATTERN_IN_VIDEO_MEMORY | MFX_IOPATTERN_OUT_VIDEO_MEMORY;
    }

    par.AsyncDepth = 1;    // TODO async

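    /* describe the input/output streams to the VPP; the output frame rate
     * was doubled in config_props, which makes the VPP emit one frame per
     * field, and we fall back to the time base when no frame rate is known */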
    par.vpp.In = hw_frames_hwctx->surfaces[0].Info;

    par.vpp.In.CropW = ctx->inputs[0]->w;
    par.vpp.In.CropH = ctx->inputs[0]->h;

    if (ctx->inputs[0]->frame_rate.num) {
        par.vpp.In.FrameRateExtN = ctx->inputs[0]->frame_rate.num;
        par.vpp.In.FrameRateExtD = ctx->inputs[0]->frame_rate.den;
    } else {
        par.vpp.In.FrameRateExtN = ctx->inputs[0]->time_base.num;
        par.vpp.In.FrameRateExtD = ctx->inputs[0]->time_base.den;
    }

    par.vpp.Out = par.vpp.In;

    if (ctx->outputs[0]->frame_rate.num) {
        par.vpp.Out.FrameRateExtN = ctx->outputs[0]->frame_rate.num;
        par.vpp.Out.FrameRateExtD = ctx->outputs[0]->frame_rate.den;
    } else {
        par.vpp.Out.FrameRateExtN = ctx->outputs[0]->time_base.num;
        par.vpp.Out.FrameRateExtD = ctx->outputs[0]->time_base.den;
    }

    err = MFXVideoVPP_Init(s->session, &par);
    if (err != MFX_ERR_NONE) {
        av_log(ctx, AV_LOG_ERROR, "Error opening the VPP for deinterlacing: %d\n", err);
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int qsvdeint_config_props(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    AVFilterLink *inlink = ctx->inputs[0];
    QSVDeintContext *s = ctx->priv;
    int ret;

    qsvdeint_uninit(ctx);

    s->last_pts = AV_NOPTS_VALUE;
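    /* field-rate output: twice the frame rate, and a time base fine enough
     * to represent it */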
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational){ 2, 1 });
    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational){ 1, 2 });

    /* check that we have a hw context */
    if (!inlink->hw_frames_ctx) {
        av_log(ctx, AV_LOG_ERROR, "No hw context provided on input\n");
        return AVERROR(EINVAL);
    }

    s->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
    if (!s->hw_frames_ctx)
        return AVERROR(ENOMEM);

    av_buffer_unref(&outlink->hw_frames_ctx);
    outlink->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
    if (!outlink->hw_frames_ctx) {
        qsvdeint_uninit(ctx);
        return AVERROR(ENOMEM);
    }

    ret = init_out_session(ctx);
    if (ret < 0)
        return ret;

    return 0;
}

static void clear_unused_frames(QSVDeintContext *s)
{
    QSVFrame *cur = s->work_frames;
    while (cur) {
        if (!cur->surface.Data.Locked) {
            av_frame_free(&cur->frame);
            cur->used = 0;
        }
        cur = cur->next;
    }
}

static int get_free_frame(QSVDeintContext *s, QSVFrame **f)
{
    QSVFrame *frame, **last;

    clear_unused_frames(s);

    frame = s->work_frames;
    last  = &s->work_frames;
    while (frame) {
        if (!frame->used) {
            *f = frame;
            return 0;
        }

        last  = &frame->next;
        frame = frame->next;
    }

    frame = av_mallocz(sizeof(*frame));
    if (!frame)
        return AVERROR(ENOMEM);
    *last = frame;
    *f    = frame;

    return 0;
}

static int submit_frame(AVFilterContext *ctx, AVFrame *frame,
                        mfxFrameSurface1 **surface)
{
    QSVDeintContext *s = ctx->priv;
    QSVFrame *qf;
    int ret;

    ret = get_free_frame(s, &qf);
    if (ret < 0)
        return ret;

    qf->frame = frame;

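    /* AV_PIX_FMT_QSV frames carry their mfxFrameSurface1 in data[3]; work
     * on a copy so the metadata tweaks below do not touch the original */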
    qf->surface = *(mfxFrameSurface1*)qf->frame->data[3];

    qf->surface.Data.Locked = 0;
    qf->surface.Info.CropW  = qf->frame->width;
    qf->surface.Info.CropH  = qf->frame->height;

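    /* translate the AVFrame field ordering and repeat flags into the MFX
     * picture structure */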
    qf->surface.Info.PicStruct = !qf->frame->interlaced_frame ? MFX_PICSTRUCT_PROGRESSIVE :
                                 (qf->frame->top_field_first ? MFX_PICSTRUCT_FIELD_TFF :
                                                               MFX_PICSTRUCT_FIELD_BFF);
    if (qf->frame->repeat_pict == 1)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FIELD_REPEATED;
    else if (qf->frame->repeat_pict == 2)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_DOUBLING;
    else if (qf->frame->repeat_pict == 4)
        qf->surface.Info.PicStruct |= MFX_PICSTRUCT_FRAME_TRIPLING;

    if (ctx->inputs[0]->frame_rate.num) {
        qf->surface.Info.FrameRateExtN = ctx->inputs[0]->frame_rate.num;
        qf->surface.Info.FrameRateExtD = ctx->inputs[0]->frame_rate.den;
    } else {
        qf->surface.Info.FrameRateExtN = ctx->inputs[0]->time_base.num;
        qf->surface.Info.FrameRateExtD = ctx->inputs[0]->time_base.den;
    }

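    /* libmfx timestamps run on a fixed 90 kHz clock */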
    qf->surface.Data.TimeStamp = av_rescale_q(qf->frame->pts,
                                              ctx->inputs[0]->time_base,
                                              (AVRational){ 1, 90000 });

    *surface = &qf->surface;
    qf->used = 1;

    return 0;
}

static int process_frame(AVFilterContext *ctx, const AVFrame *in,
                         mfxFrameSurface1 *surf_in)
{
    QSVDeintContext *s = ctx->priv;
    AVFilterLink *inlink = ctx->inputs[0];
    AVFilterLink *outlink = ctx->outputs[0];

    AVFrame *out;
    mfxFrameSurface1 *surf_out;
    mfxSyncPoint sync = NULL;
    mfxStatus err;
    int ret, again = 0;

    out = av_frame_alloc();
    if (!out)
        return AVERROR(ENOMEM);

    ret = av_hwframe_get_buffer(s->hw_frames_ctx, out, 0);
    if (ret < 0)
        goto fail;

    surf_out = (mfxFrameSurface1*)out->data[3];
    surf_out->Info.CropW     = outlink->w;
    surf_out->Info.CropH     = outlink->h;
    surf_out->Info.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

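    /* libmfx signals a transiently busy device with MFX_WRN_DEVICE_BUSY;
     * back off briefly and retry until the frame is accepted */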
    do {
        err = MFXVideoVPP_RunFrameVPPAsync(s->session, surf_in, surf_out,
                                           NULL, &sync);
        if (err == MFX_WRN_DEVICE_BUSY)
            av_usleep(1);
    } while (err == MFX_WRN_DEVICE_BUSY);

    if (err == MFX_ERR_MORE_DATA) {
        av_frame_free(&out);
        return QSVDEINT_MORE_INPUT;
    }

    if ((err < 0 && err != MFX_ERR_MORE_SURFACE) || !sync) {
        av_log(ctx, AV_LOG_ERROR, "Error during deinterlacing: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }
    if (err == MFX_ERR_MORE_SURFACE)
        again = 1;

    do {
        err = MFXVideoCORE_SyncOperation(s->session, sync, 1000);
    } while (err == MFX_WRN_IN_EXECUTION);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error synchronizing the operation: %d\n", err);
        ret = AVERROR_UNKNOWN;
        goto fail;
    }

    ret = av_frame_copy_props(out, in);
    if (ret < 0)
        goto fail;

    out->width            = outlink->w;
    out->height           = outlink->h;
    out->interlaced_frame = 0;

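    /* both frames deinterlaced from one input inherit the same pts via
     * av_frame_copy_props(), so nudge the second one to keep the output
     * timestamps strictly increasing */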
    out->pts = av_rescale_q(out->pts, inlink->time_base, outlink->time_base);
    if (out->pts == s->last_pts)
        out->pts++;
    s->last_pts = out->pts;

    ret = ff_filter_frame(outlink, out);
    if (ret < 0)
        return ret;
    s->got_output_frame = 1;

    return again ? QSVDEINT_MORE_OUTPUT : 0;
fail:
    av_frame_free(&out);
    return ret;
}

static int qsvdeint_filter_frame(AVFilterLink *link, AVFrame *in)
{
    AVFilterContext *ctx = link->dst;

    mfxFrameSurface1 *surf_in;
    int ret;

    ret = submit_frame(ctx, in, &surf_in);
    if (ret < 0) {
        av_frame_free(&in);
        return ret;
    }

    do {
        ret = process_frame(ctx, in, surf_in);
        if (ret < 0)
            return ret;
    } while (ret == QSVDEINT_MORE_OUTPUT);

    return 0;
}

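/* One input frame does not necessarily produce output (the deinterlacer
 * may buffer it), so keep requesting upstream frames until one of them
 * makes it downstream. */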
static int qsvdeint_request_frame(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    QSVDeintContext *s = ctx->priv;
    int ret = 0;

    s->got_output_frame = 0;
    while (ret >= 0 && !s->got_output_frame)
        ret = ff_request_frame(ctx->inputs[0]);

    return ret;
}

#define OFFSET(x) offsetof(QSVDeintContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM
static const AVOption options[] = {
    { NULL },
};

static const AVClass qsvdeint_class = {
    .class_name = "deinterlace_qsv",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad qsvdeint_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = qsvdeint_filter_frame,
    },
    { NULL }
};

static const AVFilterPad qsvdeint_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = qsvdeint_config_props,
        .request_frame = qsvdeint_request_frame,
    },
    { NULL }
};

AVFilter ff_vf_deinterlace_qsv = {
    .name        = "deinterlace_qsv",
    .description = NULL_IF_CONFIG_SMALL("QuickSync video deinterlacing"),

    .uninit        = qsvdeint_uninit,
    .query_formats = qsvdeint_query_formats,

    .priv_size  = sizeof(QSVDeintContext),
    .priv_class = &qsvdeint_class,

    .inputs  = qsvdeint_inputs,
    .outputs = qsvdeint_outputs,
};