/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include <va/va.h>
#include <va/va_vpp.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"

#define MAX_REFERENCES 8

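/* Per-instance state: the VAAPI device/context handles, the configured
 * deinterlacing mode and output rate, and a small queue of input frames
 * kept as forward/backward references for the VA processing pipeline. */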
typedef struct DeintVAAPIContext {
    const AVClass     *class;

    AVVAAPIDeviceContext *hwctx;
    AVBufferRef       *device_ref;

    int                mode;
    int                field_rate;
    int                auto_enable;

    int                valid_ids;
    VAConfigID         va_config;
    VAContextID        va_context;

    AVBufferRef       *input_frames_ref;
    AVHWFramesContext *input_frames;

    int                output_height;
    int                output_width;

    VAProcFilterCapDeinterlacing
                       deint_caps[VAProcDeinterlacingCount];
    int                nb_deint_caps;
    VAProcPipelineCaps pipeline_caps;

    int                queue_depth;
    int                queue_count;
    AVFrame           *frame_queue[MAX_REFERENCES];
    int                extra_delay_for_timestamps;

    VABufferID         filter_buffer;
} DeintVAAPIContext;

static const char *deint_vaapi_mode_name(int mode)
{
    switch (mode) {
#define D(name) case VAProcDeinterlacing ## name: return #name
        D(Bob);
        D(Weave);
        D(MotionAdaptive);
        D(MotionCompensated);
#undef D
    default:
        return "Invalid";
    }
}

static int deint_vaapi_query_formats(AVFilterContext *avctx)
{
    enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
    };

    ff_formats_ref(ff_make_format_list(pix_fmts),
                   &avctx->inputs[0]->out_formats);
    ff_formats_ref(ff_make_format_list(pix_fmts),
                   &avctx->outputs[0]->in_formats);

    return 0;
}

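/* Release everything tied to the current pipeline configuration: queued
 * reference frames, the filter parameter buffer, the VA context/config,
 * and the device reference.  Safe to call repeatedly. */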
static int deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    int i;

    for (i = 0; i < ctx->queue_count; i++)
        av_frame_free(&ctx->frame_queue[i]);
    ctx->queue_count = 0;

    if (ctx->filter_buffer != VA_INVALID_ID) {
        vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffer);
        ctx->filter_buffer = VA_INVALID_ID;
    }

    if (ctx->va_context != VA_INVALID_ID) {
        vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        ctx->va_context = VA_INVALID_ID;
    }

    if (ctx->va_config != VA_INVALID_ID) {
        vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        ctx->va_config = VA_INVALID_ID;
    }

    av_buffer_unref(&ctx->device_ref);
    ctx->hwctx = NULL;

    return 0;
}

static int deint_vaapi_config_input(AVFilterLink *inlink)
{
    AVFilterContext *avctx = inlink->dst;
    DeintVAAPIContext *ctx = avctx->priv;

    deint_vaapi_pipeline_uninit(avctx);

    if (!inlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the processing device.\n");
        return AVERROR(EINVAL);
    }

    ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx);
    if (!ctx->input_frames_ref)
        return AVERROR(ENOMEM);
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    return 0;
}

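/* Query the driver's deinterlacing capabilities, choose or validate the
 * requested mode, create the deinterlacing filter parameter buffer, and
 * work out how many reference frames must be queued before filtering. */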
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferDeinterlacing params;
    int i;

    ctx->nb_deint_caps = VAProcDeinterlacingCount;
    vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
                                     ctx->va_context,
                                     VAProcFilterDeinterlacing,
                                     &ctx->deint_caps,
                                     &ctx->nb_deint_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (ctx->mode == VAProcDeinterlacingNone) {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type > ctx->mode)
                ctx->mode = ctx->deint_caps[i].type;
        }
        av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
               "deinterlacing mode.\n", ctx->mode,
               deint_vaapi_mode_name(ctx->mode));
    } else {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type == ctx->mode)
                break;
        }
        if (i >= ctx->nb_deint_caps) {
            av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
                   "not supported.\n", ctx->mode,
                   deint_vaapi_mode_name(ctx->mode));
            return AVERROR(EINVAL);
        }
    }

    params.type      = VAProcFilterDeinterlacing;
    params.algorithm = ctx->mode;
    params.flags     = 0;

    av_assert0(ctx->filter_buffer == VA_INVALID_ID);
    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcFilterParameterBufferType,
                         sizeof(params), 1, &params,
                         &ctx->filter_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
               "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
                                       ctx->va_context,
                                       &ctx->filter_buffer, 1,
                                       &ctx->pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

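    // When outputting at field rate with no backward references available,
    // hold back one extra frame so that the second field's timestamp can be
    // interpolated from the following frame's pts.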
    ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
        ctx->pipeline_caps.num_backward_references == 0;

    ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
                       ctx->pipeline_caps.num_forward_references +
                       ctx->extra_delay_for_timestamps + 1;
    if (ctx->queue_depth > MAX_REFERENCES) {
        av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
               "references (%u forward, %u back).\n",
               ctx->pipeline_caps.num_forward_references,
               ctx->pipeline_caps.num_backward_references);
        return AVERROR(ENOSYS);
    }

    return 0;
}

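/* Configure the output link: create the VA config and processing context,
 * allocate the output hardware frames context, and derive the output
 * timebase/frame rate (doubled when running at field rate). */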
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    AVFilterLink *inlink = avctx->inputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVHWFramesContext *output_frames;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err;

    deint_vaapi_pipeline_uninit(avctx);

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    ctx->output_width  = ctx->input_frames->width;
    ctx->output_height = ctx->input_frames->height;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
               "deinterlacing to size %dx%d "
               "(constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width, constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    outlink->hw_frames_ctx = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!outlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    output_frames = (AVHWFramesContext*)outlink->hw_frames_ctx->data;

    output_frames->format    = AV_PIX_FMT_VAAPI;
    output_frames->sw_format = ctx->input_frames->sw_format;
    output_frames->width     = ctx->output_width;
    output_frames->height    = ctx->output_height;

    output_frames->initial_pool_size = 4;

    err = ff_filter_init_hw_frames(avctx, outlink, 10);
    if (err < 0)
        goto fail;

    err = av_hwframe_ctx_init(outlink->hw_frames_ctx);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height, 0,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = deint_vaapi_build_filter_params(avctx);
    if (err < 0)
        goto fail;

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&outlink->hw_frames_ctx);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}

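/* Map an AVColorSpace value to the closest VAProcColorStandard. */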
static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
{
    switch(av_cs) {
#define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
        CS(BT709,     BT709);
        CS(BT470BG,   BT470BG);
        CS(SMPTE170M, SMPTE170M);
        CS(SMPTE240M, SMPTE240M);
#undef CS
    default:
        return VAProcColorStandardNone;
    }
}

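/* Queue the incoming frame and, once enough reference frames have been
 * collected, run the VAAPI processing pipeline on the current frame,
 * producing one output frame (rate=frame) or two (rate=field) per input. */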
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VABufferID params_id;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

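    // The frame to deinterlace sits just after the forward references in the
    // queue: older entries become forward references and newer entries become
    // backward references for the driver.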
    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

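    // Run the pipeline once per output frame: a single pass at frame rate,
    // or one pass per field (top/bottom) when outputting at field rate.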
    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                           ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
        av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
               "deinterlace output.\n", output_surface);

        memset(&params, 0, sizeof(params));

        input_region = (VARectangle) {
            .x      = 0,
            .y      = 0,
            .width  = input_frame->width,
            .height = input_frame->height,
        };

        params.surface = input_surface;
        params.surface_region = &input_region;
        params.surface_color_standard =
            vaapi_proc_colour_standard(input_frame->colorspace);

        params.output_region = NULL;
        params.output_background_color = 0xff000000;
        params.output_color_standard = params.surface_color_standard;

        params.pipeline_flags = 0;
        params.filter_flags   = VA_FRAME_PICTURE;

        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters     = &ctx->filter_buffer;
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;

        } else {
            params.filters     = NULL;
            params.num_filters = 0;
        }

        vas = vaBeginPicture(ctx->hwctx->display,
                             ctx->va_context, output_surface);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }

        vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                             VAProcPipelineParameterBufferType,
                             sizeof(params), 1, &params, &params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }
        av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
               params_id);

        vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                              &params_id, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }

        vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_render;
        }

        if (HAVE_VAAPI_1 || ctx->hwctx->driver_quirks &
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
                // And ignore.
            }
        }

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;

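        // The output timebase is half the input timebase at field rate, so
        // the first field keeps the input timestamp (rescaled by doubling)
        // and the second field lands halfway towards the next frame's pts.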
        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    if (filter_params_addr)
        vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    av_frame_free(&output_frame);
    return err;
}

static av_cold int deint_vaapi_init(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;

    ctx->va_config     = VA_INVALID_ID;
    ctx->va_context    = VA_INVALID_ID;
    ctx->filter_buffer = VA_INVALID_ID;
    ctx->valid_ids = 1;

    return 0;
}

static av_cold void deint_vaapi_uninit(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;

    if (ctx->valid_ids)
        deint_vaapi_pipeline_uninit(avctx);

    av_buffer_unref(&ctx->input_frames_ref);
    av_buffer_unref(&ctx->device_ref);
}

#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption deint_vaapi_options[] = {
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, .unit = "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, .unit = "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, .unit = "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, .unit = "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, .unit = "mode" },

    { "rate", "Generate output at frame rate or field rate",
      OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, "rate" },
    { "frame", "Output at frame rate (one frame of output for each field-pair)",
      0, AV_OPT_TYPE_CONST, { .i64 = 1 }, .unit = "rate" },
    { "field", "Output at field rate (one frame of output for each field)",
      0, AV_OPT_TYPE_CONST, { .i64 = 2 }, .unit = "rate" },

    { "auto", "Only deinterlace fields, passing frames through unchanged",
      OFFSET(auto_enable), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};

static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &deint_vaapi_config_input,
    },
    { NULL }
};

static const AVFilterPad deint_vaapi_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &deint_vaapi_config_output,
    },
    { NULL }
};

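/*
 * Example filter-graph usage (option names as defined above; the exact
 * command-line or API invocation around the graph will vary):
 *
 *     deinterlace_vaapi
 *     deinterlace_vaapi=mode=motion_adaptive:rate=field
 *     deinterlace_vaapi=auto=1
 *
 * The input must already be in VAAPI surfaces (AV_PIX_FMT_VAAPI), e.g. from
 * a VAAPI hardware decoder or an upload via a hardware frames context.
 */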
AVFilter ff_vf_deinterlace_vaapi = {
    .name           = "deinterlace_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
    .priv_size      = sizeof(DeintVAAPIContext),
    .init           = &deint_vaapi_init,
    .uninit         = &deint_vaapi_uninit,
    .query_formats  = &deint_vaapi_query_formats,
    .inputs         = deint_vaapi_inputs,
    .outputs        = deint_vaapi_outputs,
    .priv_class     = &deint_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};