/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <string.h>

#include <va/va.h>
#include <va/va_vpp.h>

#include "libavutil/avassert.h"
#include "libavutil/common.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
#define MAX_REFERENCES 8
typedef struct DeintVAAPIContext {
    const AVClass     *class;

    AVVAAPIDeviceContext *hwctx;
    AVBufferRef       *device_ref;

    int                mode;
    int                field_rate;
    int                auto_enable;

    VAConfigID         va_config;
    VAContextID        va_context;

    AVBufferRef       *input_frames_ref;
    AVHWFramesContext *input_frames;

    AVBufferRef       *output_frames_ref;
    AVHWFramesContext *output_frames;
    int                output_width;
    int                output_height;

    VAProcFilterCapDeinterlacing
                       deint_caps[VAProcDeinterlacingCount];
    int             nb_deint_caps;
    VAProcPipelineCaps pipeline_caps;

    int                queue_depth;
    int                queue_count;
    AVFrame           *frame_queue[MAX_REFERENCES];
    int                extra_delay_for_timestamps;

    VABufferID         filter_buffer;
} DeintVAAPIContext;
static const char *deint_vaapi_mode_name(int mode)
{
    switch (mode) {
#define D(name) case VAProcDeinterlacing ## name: return #name
        D(None);
        D(Bob);
        D(Weave);
        D(MotionAdaptive);
        D(MotionCompensated);
#undef D
    default:
        return "Invalid";
    }
}
static int deint_vaapi_query_formats(AVFilterContext *avctx)
{
    enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
    };

    ff_formats_ref(ff_make_format_list(pix_fmts),
                   &avctx->inputs[0]->out_formats);
    ff_formats_ref(ff_make_format_list(pix_fmts),
                   &avctx->outputs[0]->in_formats);

    return 0;
}
static int deint_vaapi_pipeline_uninit(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    int i;

    for (i = 0; i < ctx->queue_count; i++)
        av_frame_free(&ctx->frame_queue[i]);
    ctx->queue_count = 0;

    if (ctx->filter_buffer != VA_INVALID_ID) {
        vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffer);
        ctx->filter_buffer = VA_INVALID_ID;
    }

    if (ctx->va_context != VA_INVALID_ID) {
        vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        ctx->va_context = VA_INVALID_ID;
    }

    if (ctx->va_config != VA_INVALID_ID) {
        vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        ctx->va_config = VA_INVALID_ID;
    }

    av_buffer_unref(&ctx->device_ref);

    return 0;
}
static int deint_vaapi_config_input(AVFilterLink *inlink)
{
    AVFilterContext *avctx = inlink->dst;
    DeintVAAPIContext *ctx = avctx->priv;

    deint_vaapi_pipeline_uninit(avctx);

    if (!inlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the processing device.\n");
        return AVERROR(EINVAL);
    }

    ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx);
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    return 0;
}
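// Query the driver's deinterlacing capabilities, validate or pick the
// deinterlacing mode, create the filter parameter buffer and work out
// how many reference frames need to be queued.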
static int deint_vaapi_build_filter_params(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;
    VAStatus vas;
    VAProcFilterParameterBufferDeinterlacing params;
    int i;

    ctx->nb_deint_caps = VAProcDeinterlacingCount;
    vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display,
                                     ctx->va_context,
                                     VAProcFilterDeinterlacing,
                                     &ctx->deint_caps,
                                     &ctx->nb_deint_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    if (ctx->mode == VAProcDeinterlacingNone) {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type > ctx->mode)
                ctx->mode = ctx->deint_caps[i].type;
        }
        av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default "
               "deinterlacing mode.\n", ctx->mode,
               deint_vaapi_mode_name(ctx->mode));
    } else {
        for (i = 0; i < ctx->nb_deint_caps; i++) {
            if (ctx->deint_caps[i].type == ctx->mode)
                break;
        }
        if (i >= ctx->nb_deint_caps) {
            av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is "
                   "not supported.\n", ctx->mode,
                   deint_vaapi_mode_name(ctx->mode));
            return AVERROR(EINVAL);
        }
    }

    params.type      = VAProcFilterDeinterlacing;
    params.algorithm = ctx->mode;

    av_assert0(ctx->filter_buffer == VA_INVALID_ID);
    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcFilterParameterBufferType,
                         sizeof(params), 1, &params,
                         &ctx->filter_buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace "
               "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display,
                                       ctx->va_context,
                                       &ctx->filter_buffer, 1,
                                       &ctx->pipeline_caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline "
               "caps: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }
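    // With no backward references the second field of a frame has no
    // following frame to take a timestamp from, so when outputting at
    // field rate an extra frame is held back purely to interpolate the
    // second field's timestamp.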
    ctx->extra_delay_for_timestamps = ctx->field_rate == 2 &&
        ctx->pipeline_caps.num_backward_references == 0;

    ctx->queue_depth = ctx->pipeline_caps.num_backward_references +
                       ctx->pipeline_caps.num_forward_references +
                       ctx->extra_delay_for_timestamps + 1;
    if (ctx->queue_depth > MAX_REFERENCES) {
        av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many "
               "references (%u forward, %u back).\n",
               ctx->pipeline_caps.num_forward_references,
               ctx->pipeline_caps.num_backward_references);
        return AVERROR(ENOSYS);
    }

    return 0;
}
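// Configure the output link: create the VA config and context, allocate
// the output frame pool and derive the output timebase and frame rate
// from the selected rate mode.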
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    AVFilterLink *inlink = avctx->inputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err;

    deint_vaapi_pipeline_uninit(avctx);

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    ctx->output_width  = ctx->input_frames->width;
    ctx->output_height = ctx->input_frames->height;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
               "deinterlacing to size %dx%d "
               "(constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width, constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->input_frames->sw_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;
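    // The VA context is created against the output surface pool, which
    // the driver uses as render targets for the processing pipeline.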
    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height, 0,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = deint_vaapi_build_filter_params(avctx);
    if (err < 0)
        goto fail;

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    outlink->time_base  = av_mul_q(inlink->time_base,
                                   (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
{
    switch (av_cs) {
#define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
        CS(BT709,     BT709);
        CS(BT470BG,   BT470BG);
        CS(SMPTE170M, SMPTE170M);
        CS(SMPTE240M, SMPTE240M);
#undef CS
    default:
        return VAProcColorStandardNone;
    }
}
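// Queue the incoming frame, then run the VAAPI processing pipeline once
// per output frame (once per field in field-rate mode), building a
// pipeline parameter buffer for each run.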
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VABufferID params_id;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);
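    // Hold frames in a sliding queue so that the forward and backward
    // reference surfaces required by the pipeline are always available
    // around the frame currently being deinterlaced.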
    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
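    // In frame-rate mode one pass produces the whole output frame; in
    // field-rate mode the pipeline is run twice, once per field.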
    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                           ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
        av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
               "deinterlace output.\n", output_surface);

        memset(&params, 0, sizeof(params));

        input_region = (VARectangle) {
            .x      = 0,
            .y      = 0,
            .width  = input_frame->width,
            .height = input_frame->height,
        };

        params.surface = input_surface;
        params.surface_region = &input_region;
        params.surface_color_standard =
            vaapi_proc_colour_standard(input_frame->colorspace);

        params.output_region = NULL;
        params.output_background_color = 0xff000000;
        params.output_color_standard = params.surface_color_standard;

        params.pipeline_flags = 0;
        params.filter_flags   = VA_FRAME_PICTURE;
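        // When deinterlacing is enabled for this frame, map the filter
        // parameter buffer and set the field-order flags for the field
        // being generated on this pass.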
        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters     = &ctx->filter_buffer;
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;

        } else {
            params.filters     = NULL;
            params.num_filters = 0;
        }
        vas = vaBeginPicture(ctx->hwctx->display,
                             ctx->va_context, output_surface);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }

        vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                             VAProcPipelineParameterBufferType,
                             sizeof(params), 1, &params, &params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }
        av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
               params_id);

        vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                              &params_id, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }

        vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_render;
        }
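        // Drivers with the RENDER_PARAM_BUFFERS quirk do not take
        // ownership of parameter buffers passed to vaRenderPicture(),
        // so the buffer has to be destroyed explicitly here.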
        if (ctx->hwctx->driver_quirks &
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
            }
        }

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;
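        // At field rate the output timebase is half the input timebase,
        // so the first field keeps the (doubled) input timestamp and the
        // second field takes the midpoint to the following frame.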
        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    if (filter_params_addr)
        vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    av_frame_free(&output_frame);
    return err;
}
static av_cold int deint_vaapi_init(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;

    ctx->va_config     = VA_INVALID_ID;
    ctx->va_context    = VA_INVALID_ID;
    ctx->filter_buffer = VA_INVALID_ID;

    return 0;
}
static av_cold void deint_vaapi_uninit(AVFilterContext *avctx)
{
    DeintVAAPIContext *ctx = avctx->priv;

    deint_vaapi_pipeline_uninit(avctx);

    av_buffer_unref(&ctx->input_frames_ref);
    av_buffer_unref(&ctx->output_frames_ref);
    av_buffer_unref(&ctx->device_ref);
}
#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption deint_vaapi_options[] = {
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, .unit = "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, .unit = "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, .unit = "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, .unit = "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, .unit = "mode" },

    { "rate", "Generate output at frame rate or field rate",
      OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, "rate" },
    { "frame", "Output at frame rate (one frame of output for each field-pair)",
      0, AV_OPT_TYPE_CONST, { .i64 = 1 }, .unit = "rate" },
    { "field", "Output at field rate (one frame of output for each field)",
      0, AV_OPT_TYPE_CONST, { .i64 = 2 }, .unit = "rate" },

    { "auto", "Only deinterlace fields, passing frames through unchanged",
      OFFSET(auto_enable), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};
static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};
static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &deint_vaapi_config_input,
    },
    { NULL }
};
[] = {
658 .type
= AVMEDIA_TYPE_VIDEO
,
659 .config_props
= &deint_vaapi_config_output
,
AVFilter ff_vf_deinterlace_vaapi = {
    .name           = "deinterlace_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
    .priv_size      = sizeof(DeintVAAPIContext),
    .init           = &deint_vaapi_init,
    .uninit         = &deint_vaapi_uninit,
    .query_formats  = &deint_vaapi_query_formats,
    .inputs         = deint_vaapi_inputs,
    .outputs        = deint_vaapi_outputs,
    .priv_class     = &deint_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};