lavfi: Add VAAPI deinterlacer
File: libavfilter/vf_deinterlace_vaapi.c (libav.git, commit ade370a4, author MT)
1/*
2 * This file is part of Libav.
3 *
4 * Libav is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
8 *
9 * Libav is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
13 *
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with Libav; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
17 */
18
19#include <string.h>
20
21#include <va/va.h>
22#include <va/va_vpp.h>
23
24#include "libavutil/avassert.h"
25#include "libavutil/hwcontext.h"
26#include "libavutil/hwcontext_vaapi.h"
27#include "libavutil/mem.h"
28#include "libavutil/opt.h"
29#include "libavutil/pixdesc.h"
30
31#include "avfilter.h"
32#include "formats.h"
33#include "internal.h"
34#include "video.h"
35
36#define MAX_REFERENCES 8
37
typedef struct DeintVAAPIContext {
    const AVClass *class;

    AVVAAPIDeviceContext *hwctx;      // VAAPI device (borrowed from device_ref)
    AVBufferRef *device_ref;          // reference keeping the device alive

    int mode;                         // requested VAProcDeinterlacingType
                                      // (None = pick best available)

    int valid_ids;                    // set once init() has written the
                                      // VA_INVALID_ID sentinels below
    VAConfigID va_config;
    VAContextID va_context;

    AVBufferRef *input_frames_ref;    // input hw frames context reference
    AVHWFramesContext *input_frames;  // borrowed view of input_frames_ref->data

    AVBufferRef *output_frames_ref;   // output hw frames context reference
    AVHWFramesContext *output_frames; // borrowed view of output_frames_ref->data
    int output_height;
    int output_width;

    // Deinterlacing capabilities reported by the driver.
    VAProcFilterCapDeinterlacing
        deint_caps[VAProcDeinterlacingCount];
    int nb_deint_caps;
    VAProcPipelineCaps pipeline_caps;

    // Sliding window of input frames: backward references, the current
    // frame, then forward references (queue_depth entries in total).
    int queue_depth;
    int queue_count;
    AVFrame *frame_queue[MAX_REFERENCES];

    VABufferID filter_buffer;         // VA buffer holding the filter params
} DeintVAAPIContext;
69
70static const char *deint_vaapi_mode_name(int mode)
71{
72 switch (mode) {
73#define D(name) case VAProcDeinterlacing ## name: return #name
74 D(Bob);
75 D(Weave);
76 D(MotionAdaptive);
77 D(MotionCompensated);
78#undef D
79 default:
80 return "Invalid";
81 }
82}
83
84static int deint_vaapi_query_formats(AVFilterContext *avctx)
85{
86 enum AVPixelFormat pix_fmts[] = {
87 AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
88 };
89
90 ff_formats_ref(ff_make_format_list(pix_fmts),
91 &avctx->inputs[0]->out_formats);
92 ff_formats_ref(ff_make_format_list(pix_fmts),
93 &avctx->outputs[0]->in_formats);
94
95 return 0;
96}
97
98static int deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
99{
100 DeintVAAPIContext *ctx = avctx->priv;
101 int i;
102
103 for (i = 0; i < ctx->queue_count; i++)
104 av_frame_free(&ctx->frame_queue[i]);
105 ctx->queue_count = 0;
106
107 if (ctx->filter_buffer != VA_INVALID_ID) {
108 vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffer);
109 ctx->filter_buffer = VA_INVALID_ID;
110 }
111
112 if (ctx->va_context != VA_INVALID_ID) {
113 vaDestroyContext(ctx->hwctx->display, ctx->va_context);
114 ctx->va_context = VA_INVALID_ID;
115 }
116
117 if (ctx->va_config != VA_INVALID_ID) {
118 vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
119 ctx->va_config = VA_INVALID_ID;
120 }
121
122 av_buffer_unref(&ctx->device_ref);
123 ctx->hwctx = NULL;
124
125 return 0;
126}
127
128static int deint_vaapi_config_input(AVFilterLink *inlink)
129{
130 AVFilterContext *avctx = inlink->dst;
131 DeintVAAPIContext *ctx = avctx->priv;
132
133 deint_vaapi_pipeline_uninit(avctx);
134
135 if (!inlink->hw_frames_ctx) {
136 av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
137 "required to associate the processing device.\n");
138 return AVERROR(EINVAL);
139 }
140
141 ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx);
142 ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;
143
144 return 0;
145}
146
147static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
148{
149 DeintVAAPIContext *ctx = avctx->priv;
150 VAStatus vas;
151 VAProcFilterParameterBufferDeinterlacing params;
152 int i;
153
154 ctx->nb_deint_caps = VAProcDeinterlacingCount;
155 vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
156 ctx->va_context,
157 VAProcFilterDeinterlacing,
158 &ctx->deint_caps,
159 &ctx->nb_deint_caps);
160 if (vas != VA_STATUS_SUCCESS) {
161 av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
162 "caps: %d (%s).\n", vas, vaErrorStr(vas));
163 return AVERROR(EIO);
164 }
165
166 if (ctx->mode == VAProcDeinterlacingNone) {
167 for (i = 0; i < ctx->nb_deint_caps; i++) {
168 if (ctx->deint_caps[i].type > ctx->mode)
169 ctx->mode = ctx->deint_caps[i].type;
170 }
171 av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
172 "deinterlacing mode.\n", ctx->mode,
173 deint_vaapi_mode_name(ctx->mode));
174 } else {
175 for (i = 0; i < ctx->nb_deint_caps; i++) {
176 if (ctx->deint_caps[i].type == ctx->mode)
177 break;
178 }
179 if (i >= ctx->nb_deint_caps) {
180 av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
181 "not supported.\n", ctx->mode,
182 deint_vaapi_mode_name(ctx->mode));
183 }
184 }
185
186 params.type = VAProcFilterDeinterlacing;
187 params.algorithm = ctx->mode;
188 params.flags = 0;
189
190 av_assert0(ctx->filter_buffer == VA_INVALID_ID);
191 vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
192 VAProcFilterParameterBufferType,
193 sizeof(params), 1, &params,
194 &ctx->filter_buffer);
195 if (vas != VA_STATUS_SUCCESS) {
196 av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
197 "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
198 return AVERROR(EIO);
199 }
200
201 vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
202 ctx->va_context,
203 &ctx->filter_buffer, 1,
204 &ctx->pipeline_caps);
205 if (vas != VA_STATUS_SUCCESS) {
206 av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
207 "caps: %d (%s).\n", vas, vaErrorStr(vas));
208 return AVERROR(EIO);
209 }
210
211 ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
212 ctx->pipeline_caps.num_forward_references + 1;
213 if (ctx->queue_depth > MAX_REFERENCES) {
214 av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
215 "references (%u forward, %u back).\n",
216 ctx->pipeline_caps.num_forward_references,
217 ctx->pipeline_caps.num_backward_references);
218 return AVERROR(ENOSYS);
219 }
220
221 return 0;
222}
223
224static int deint_vaapi_config_output(AVFilterLink *outlink)
225{
226 AVFilterContext *avctx = outlink->src;
227 DeintVAAPIContext *ctx = avctx->priv;
228 AVVAAPIHWConfig *hwconfig = NULL;
229 AVHWFramesConstraints *constraints = NULL;
230 AVVAAPIFramesContext *va_frames;
231 VAStatus vas;
232 int err;
233
234 deint_vaapi_pipeline_uninit(avctx);
235
236 av_assert0(ctx->input_frames);
237 ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
238 ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;
239
240 ctx->output_width = ctx->input_frames->width;
241 ctx->output_height = ctx->input_frames->height;
242
243 av_assert0(ctx->va_config == VA_INVALID_ID);
244 vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
245 VAEntrypointVideoProc, 0, 0, &ctx->va_config);
246 if (vas != VA_STATUS_SUCCESS) {
247 av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
248 "config: %d (%s).\n", vas, vaErrorStr(vas));
249 err = AVERROR(EIO);
250 goto fail;
251 }
252
253 hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
254 if (!hwconfig) {
255 err = AVERROR(ENOMEM);
256 goto fail;
257 }
258 hwconfig->config_id = ctx->va_config;
259
260 constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
261 hwconfig);
262 if (!constraints) {
263 err = AVERROR(ENOMEM);
264 goto fail;
265 }
266
267 if (ctx->output_width < constraints->min_width ||
268 ctx->output_height < constraints->min_height ||
269 ctx->output_width > constraints->max_width ||
270 ctx->output_height > constraints->max_height) {
271 av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
272 "deinterlacing to size %dx%d "
273 "(constraints: width %d-%d height %d-%d).\n",
274 ctx->output_width, ctx->output_height,
275 constraints->min_width, constraints->max_width,
276 constraints->min_height, constraints->max_height);
277 err = AVERROR(EINVAL);
278 goto fail;
279 }
280
281 err = deint_vaapi_build_filter_params(avctx);
282 if (err < 0)
283 goto fail;
284
285 ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
286 if (!ctx->output_frames_ref) {
287 av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
288 "for output.\n");
289 err = AVERROR(ENOMEM);
290 goto fail;
291 }
292
293 ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;
294
295 ctx->output_frames->format = AV_PIX_FMT_VAAPI;
296 ctx->output_frames->sw_format = ctx->input_frames->sw_format;
297 ctx->output_frames->width = ctx->output_width;
298 ctx->output_frames->height = ctx->output_height;
299
300 // The number of output frames we need is determined by what follows
301 // the filter. If it's an encoder with complex frame reference
302 // structures then this could be very high.
303 ctx->output_frames->initial_pool_size = 10;
304
305 err = av_hwframe_ctx_init(ctx->output_frames_ref);
306 if (err < 0) {
307 av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
308 "context for output: %d\n", err);
309 goto fail;
310 }
311
312 va_frames = ctx->output_frames->hwctx;
313
314 av_assert0(ctx->va_context == VA_INVALID_ID);
315 vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
316 ctx->output_width, ctx->output_height, 0,
317 va_frames->surface_ids, va_frames->nb_surfaces,
318 &ctx->va_context);
319 if (vas != VA_STATUS_SUCCESS) {
320 av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
321 "context: %d (%s).\n", vas, vaErrorStr(vas));
322 return AVERROR(EIO);
323 }
324
325 outlink->w = ctx->output_width;
326 outlink->h = ctx->output_height;
327
328 outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
329 if (!outlink->hw_frames_ctx) {
330 err = AVERROR(ENOMEM);
331 goto fail;
332 }
333
334 av_freep(&hwconfig);
335 av_hwframe_constraints_free(&constraints);
336 return 0;
337
338fail:
339 av_buffer_unref(&ctx->output_frames_ref);
340 av_freep(&hwconfig);
341 av_hwframe_constraints_free(&constraints);
342 return err;
343}
344
345static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
346{
347 switch(av_cs) {
348#define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
349 CS(BT709, BT709);
350 CS(BT470BG, BT470BG);
351 CS(SMPTE170M, SMPTE170M);
352 CS(SMPTE240M, SMPTE240M);
353#undef CS
354 default:
355 return VAProcColorStandardNone;
356 }
357}
358
/**
 * Filter one input frame.
 *
 * Frames accumulate in frame_queue until queue_depth of them are held
 * (the frame being deinterlaced plus the backward/forward references the
 * pipeline requires); until then this returns 0 producing no output.
 * Ownership of input_frame passes to the queue; frames are freed as they
 * fall off the front.
 */
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VABufferID params_id;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->queue_count < ctx->queue_depth) {
        // Still filling the queue: store the frame and maybe wait for more.
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        // Queue is full: drop the oldest frame, shift the rest down and
        // append the new frame at the end.
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    // The frame actually being deinterlaced sits just after the backward
    // references in the queue.  For VAAPI frames, data[3] carries the
    // surface ID.
    input_frame =
        ctx->frame_queue[ctx->pipeline_caps.num_backward_references];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    // Backward references are listed nearest-first (walking back in time),
    // forward references nearest-first going forward.
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[ctx->pipeline_caps.num_backward_references -
                             i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[ctx->pipeline_caps.num_backward_references +
                             i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

    output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                       ctx->output_height);
    if (!output_frame) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace output.\n", output_surface);

    memset(&params, 0, sizeof(params));

    // Process the full frame; no cropping.
    input_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = input_frame->width,
        .height = input_frame->height,
    };

    params.surface = input_surface;
    params.surface_region = &input_region;
    params.surface_color_standard =
        vaapi_proc_colour_standard(input_frame->colorspace);

    params.output_region = NULL;
    // Opaque black background (AYUV-style packed value).
    params.output_background_color = 0xff000000;
    params.output_color_standard = params.surface_color_standard;

    params.pipeline_flags = 0;
    params.filter_flags = VA_FRAME_PICTURE;

    // The filter parameter buffer was created at config time; map it here
    // to update the per-frame field-order flag.
    vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
                      &filter_params_addr);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
               "buffer: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }
    filter_params = filter_params_addr;
    filter_params->flags = 0;
    if (input_frame->interlaced_frame && !input_frame->top_field_first)
        filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
    filter_params_addr = NULL;
    vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    if (vas != VA_STATUS_SUCCESS)
        av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
               "buffer: %d (%s).\n", vas, vaErrorStr(vas));

    params.filters = &ctx->filter_buffer;
    params.num_filters = 1;

    params.forward_references = forward_references;
    params.num_forward_references =
        ctx->pipeline_caps.num_forward_references;
    params.backward_references = backward_references;
    params.num_backward_references =
        ctx->pipeline_caps.num_backward_references;

    // Standard VA render sequence: begin / create+render param buffer / end.
    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(params), 1, &params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    // With this quirk the driver does not consume parameter buffers in
    // vaRenderPicture, so we must destroy them ourselves.
    if (ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }

    err = av_frame_copy_props(output_frame, input_frame);
    if (err < 0)
        goto fail;

    av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output_frame->format),
           output_frame->width, output_frame->height, output_frame->pts);

    return ff_filter_frame(outlink, output_frame);

    // Error unwinding: once vaBeginPicture has succeeded the picture must
    // still be rendered and ended so the context stays usable.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    if (filter_params_addr)
        vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    av_frame_free(&output_frame);
    return err;
}
544
545static av_cold int deint_vaapi_init(AVFilterContext *avctx)
546{
547 DeintVAAPIContext *ctx = avctx->priv;
548
549 ctx->va_config = VA_INVALID_ID;
550 ctx->va_context = VA_INVALID_ID;
551 ctx->filter_buffer = VA_INVALID_ID;
552 ctx->valid_ids = 1;
553
554 return 0;
555}
556
557static av_cold void deint_vaapi_uninit(AVFilterContext *avctx)
558{
559 DeintVAAPIContext *ctx = avctx->priv;
560
561 if (ctx->valid_ids)
562 deint_vaapi_pipeline_uninit(avctx);
563
564 av_buffer_unref(&ctx->input_frames_ref);
565 av_buffer_unref(&ctx->output_frames_ref);
566 av_buffer_unref(&ctx->device_ref);
567}
568
#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
// "mode" selects the deinterlacing algorithm; the default value
// (VAProcDeinterlacingNone) makes deint_vaapi_build_filter_params pick the
// highest-numbered algorithm the driver supports.
static const AVOption deint_vaapi_options[] = {
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, .unit = "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, .unit = "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, .unit = "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, .unit = "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, .unit = "mode" },
    { NULL },
};
587
// AVClass wiring for option parsing and logging.
static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};
594
// Single video input pad; frames arrive via deint_vaapi_filter_frame.
static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &deint_vaapi_config_input,
    },
    { NULL }
};
604
// Single video output pad; configured by deint_vaapi_config_output.
static const AVFilterPad deint_vaapi_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &deint_vaapi_config_output,
    },
    { NULL }
};
613
// Filter registration.  HWFRAME_AWARE: the filter manages its own
// hw_frames_ctx on the output link.
AVFilter ff_vf_deinterlace_vaapi = {
    .name           = "deinterlace_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
    .priv_size      = sizeof(DeintVAAPIContext),
    .init           = &deint_vaapi_init,
    .uninit         = &deint_vaapi_uninit,
    .query_formats  = &deint_vaapi_query_formats,
    .inputs         = deint_vaapi_inputs,
    .outputs        = deint_vaapi_outputs,
    .priv_class     = &deint_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};