Commit | Line | Data |
---|---|---|
ade370a4 MT |
1 | /* |
2 | * This file is part of Libav. | |
3 | * | |
4 | * Libav is free software; you can redistribute it and/or | |
5 | * modify it under the terms of the GNU Lesser General Public | |
6 | * License as published by the Free Software Foundation; either | |
7 | * version 2.1 of the License, or (at your option) any later version. | |
8 | * | |
9 | * Libav is distributed in the hope that it will be useful, | |
10 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
11 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
12 | * Lesser General Public License for more details. | |
13 | * | |
14 | * You should have received a copy of the GNU Lesser General Public | |
15 | * License along with Libav; if not, write to the Free Software | |
16 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
17 | */ | |
18 | ||
19 | #include <string.h> | |
20 | ||
21 | #include <va/va.h> | |
22 | #include <va/va_vpp.h> | |
23 | ||
24 | #include "libavutil/avassert.h" | |
9aa251c9 | 25 | #include "libavutil/common.h" |
ade370a4 MT |
26 | #include "libavutil/hwcontext.h" |
27 | #include "libavutil/hwcontext_vaapi.h" | |
28 | #include "libavutil/mem.h" | |
29 | #include "libavutil/opt.h" | |
30 | #include "libavutil/pixdesc.h" | |
31 | ||
32 | #include "avfilter.h" | |
33 | #include "formats.h" | |
34 | #include "internal.h" | |
35 | #include "video.h" | |
36 | ||
// Maximum number of frames held in the reference queue
// (forward refs + current + backward refs + timestamp delay).
#define MAX_REFERENCES 8

typedef struct DeintVAAPIContext {
    const AVClass *class;

    AVVAAPIDeviceContext *hwctx;   // VAAPI device context; hwctx->display used for all VA calls
    AVBufferRef *device_ref;       // keeps the device context alive while the pipeline exists

    int mode;                      // VAProcDeinterlacing* algorithm chosen by the "mode" option
    int field_rate;                // 1 = one output frame per field-pair, 2 = one per field
    int auto_enable;               // if set, only frames marked interlaced are deinterlaced

    int valid_ids;                 // set in init once the VA ID fields below are initialised
    VAConfigID va_config;
    VAContextID va_context;

    AVBufferRef       *input_frames_ref;   // hw frames context of the input link
    AVHWFramesContext *input_frames;

    AVBufferRef       *output_frames_ref;  // hw frames context created for the output link
    AVHWFramesContext *output_frames;
    int output_height;
    int output_width;

    VAProcFilterCapDeinterlacing
              deint_caps[VAProcDeinterlacingCount];
    int    nb_deint_caps;
    VAProcPipelineCaps pipeline_caps;      // reference-frame requirements reported by the driver

    int queue_depth;               // frames needed in the queue before output can start
    int queue_count;               // frames currently queued
    AVFrame *frame_queue[MAX_REFERENCES];
    int extra_delay_for_timestamps; // buffer one extra frame so the second field's
                                    // pts can be interpolated from the next frame

    VABufferID filter_buffer;      // VA buffer holding the deinterlace filter parameters
} DeintVAAPIContext;
73 | ||
74 | static const char *deint_vaapi_mode_name(int mode) | |
75 | { | |
76 | switch (mode) { | |
77 | #define D(name) case VAProcDeinterlacing ## name: return #name | |
78 | D(Bob); | |
79 | D(Weave); | |
80 | D(MotionAdaptive); | |
81 | D(MotionCompensated); | |
82 | #undef D | |
83 | default: | |
84 | return "Invalid"; | |
85 | } | |
86 | } | |
87 | ||
88 | static int deint_vaapi_query_formats(AVFilterContext *avctx) | |
89 | { | |
90 | enum AVPixelFormat pix_fmts[] = { | |
91 | AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE, | |
92 | }; | |
93 | ||
94 | ff_formats_ref(ff_make_format_list(pix_fmts), | |
95 | &avctx->inputs[0]->out_formats); | |
96 | ff_formats_ref(ff_make_format_list(pix_fmts), | |
97 | &avctx->outputs[0]->in_formats); | |
98 | ||
99 | return 0; | |
100 | } | |
101 | ||
102 | static int deint_vaapi_pipeline_uninit(AVFilterContext *avctx) | |
103 | { | |
104 | DeintVAAPIContext *ctx = avctx->priv; | |
105 | int i; | |
106 | ||
107 | for (i = 0; i < ctx->queue_count; i++) | |
108 | av_frame_free(&ctx->frame_queue[i]); | |
109 | ctx->queue_count = 0; | |
110 | ||
111 | if (ctx->filter_buffer != VA_INVALID_ID) { | |
112 | vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffer); | |
113 | ctx->filter_buffer = VA_INVALID_ID; | |
114 | } | |
115 | ||
116 | if (ctx->va_context != VA_INVALID_ID) { | |
117 | vaDestroyContext(ctx->hwctx->display, ctx->va_context); | |
118 | ctx->va_context = VA_INVALID_ID; | |
119 | } | |
120 | ||
121 | if (ctx->va_config != VA_INVALID_ID) { | |
122 | vaDestroyConfig(ctx->hwctx->display, ctx->va_config); | |
123 | ctx->va_config = VA_INVALID_ID; | |
124 | } | |
125 | ||
126 | av_buffer_unref(&ctx->device_ref); | |
127 | ctx->hwctx = NULL; | |
128 | ||
129 | return 0; | |
130 | } | |
131 | ||
132 | static int deint_vaapi_config_input(AVFilterLink *inlink) | |
133 | { | |
134 | AVFilterContext *avctx = inlink->dst; | |
135 | DeintVAAPIContext *ctx = avctx->priv; | |
136 | ||
137 | deint_vaapi_pipeline_uninit(avctx); | |
138 | ||
139 | if (!inlink->hw_frames_ctx) { | |
140 | av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is " | |
141 | "required to associate the processing device.\n"); | |
142 | return AVERROR(EINVAL); | |
143 | } | |
144 | ||
145 | ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx); | |
146 | ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data; | |
147 | ||
148 | return 0; | |
149 | } | |
150 | ||
151 | static int deint_vaapi_build_filter_params(AVFilterContext *avctx) | |
152 | { | |
153 | DeintVAAPIContext *ctx = avctx->priv; | |
154 | VAStatus vas; | |
155 | VAProcFilterParameterBufferDeinterlacing params; | |
156 | int i; | |
157 | ||
158 | ctx->nb_deint_caps = VAProcDeinterlacingCount; | |
159 | vas = vaQueryVideoProcFilterCaps(ctx->hwctx->display, | |
160 | ctx->va_context, | |
161 | VAProcFilterDeinterlacing, | |
162 | &ctx->deint_caps, | |
163 | &ctx->nb_deint_caps); | |
164 | if (vas != VA_STATUS_SUCCESS) { | |
165 | av_log(avctx, AV_LOG_ERROR, "Failed to query deinterlacing " | |
166 | "caps: %d (%s).\n", vas, vaErrorStr(vas)); | |
167 | return AVERROR(EIO); | |
168 | } | |
169 | ||
170 | if (ctx->mode == VAProcDeinterlacingNone) { | |
171 | for (i = 0; i < ctx->nb_deint_caps; i++) { | |
172 | if (ctx->deint_caps[i].type > ctx->mode) | |
173 | ctx->mode = ctx->deint_caps[i].type; | |
174 | } | |
175 | av_log(avctx, AV_LOG_VERBOSE, "Picking %d (%s) as default " | |
176 | "deinterlacing mode.\n", ctx->mode, | |
177 | deint_vaapi_mode_name(ctx->mode)); | |
178 | } else { | |
179 | for (i = 0; i < ctx->nb_deint_caps; i++) { | |
180 | if (ctx->deint_caps[i].type == ctx->mode) | |
181 | break; | |
182 | } | |
183 | if (i >= ctx->nb_deint_caps) { | |
184 | av_log(avctx, AV_LOG_ERROR, "Deinterlacing mode %d (%s) is " | |
185 | "not supported.\n", ctx->mode, | |
186 | deint_vaapi_mode_name(ctx->mode)); | |
187 | } | |
188 | } | |
189 | ||
190 | params.type = VAProcFilterDeinterlacing; | |
191 | params.algorithm = ctx->mode; | |
192 | params.flags = 0; | |
193 | ||
194 | av_assert0(ctx->filter_buffer == VA_INVALID_ID); | |
195 | vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context, | |
196 | VAProcFilterParameterBufferType, | |
197 | sizeof(params), 1, ¶ms, | |
198 | &ctx->filter_buffer); | |
199 | if (vas != VA_STATUS_SUCCESS) { | |
200 | av_log(avctx, AV_LOG_ERROR, "Failed to create deinterlace " | |
201 | "parameter buffer: %d (%s).\n", vas, vaErrorStr(vas)); | |
202 | return AVERROR(EIO); | |
203 | } | |
204 | ||
205 | vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display, | |
206 | ctx->va_context, | |
207 | &ctx->filter_buffer, 1, | |
208 | &ctx->pipeline_caps); | |
209 | if (vas != VA_STATUS_SUCCESS) { | |
210 | av_log(avctx, AV_LOG_ERROR, "Failed to query pipeline " | |
211 | "caps: %d (%s).\n", vas, vaErrorStr(vas)); | |
212 | return AVERROR(EIO); | |
213 | } | |
214 | ||
9aa251c9 MT |
215 | ctx->extra_delay_for_timestamps = ctx->field_rate == 2 && |
216 | ctx->pipeline_caps.num_backward_references == 0; | |
217 | ||
ade370a4 | 218 | ctx->queue_depth = ctx->pipeline_caps.num_backward_references + |
9aa251c9 MT |
219 | ctx->pipeline_caps.num_forward_references + |
220 | ctx->extra_delay_for_timestamps + 1; | |
ade370a4 MT |
221 | if (ctx->queue_depth > MAX_REFERENCES) { |
222 | av_log(avctx, AV_LOG_ERROR, "Pipeline requires too many " | |
223 | "references (%u forward, %u back).\n", | |
224 | ctx->pipeline_caps.num_forward_references, | |
225 | ctx->pipeline_caps.num_backward_references); | |
226 | return AVERROR(ENOSYS); | |
227 | } | |
228 | ||
229 | return 0; | |
230 | } | |
231 | ||
// Output link configuration: create the VA config and context, allocate
// the output frames pool, build the filter parameters and set the
// output link properties (size, time base, frame rate).
static int deint_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    AVFilterLink *inlink = avctx->inputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err;

    // Rebuild the whole pipeline from scratch on (re)configuration.
    deint_vaapi_pipeline_uninit(avctx);

    av_assert0(ctx->input_frames);
    // NOTE(review): av_buffer_ref() result is not checked; an
    // allocation failure would be dereferenced as NULL just below.
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    // Output keeps the dimensions of the input frames context.
    ctx->output_width = ctx->input_frames->width;
    ctx->output_height = ctx->input_frames->height;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    // Query the device's frame-size constraints for this config and
    // verify the output dimensions fall inside them.
    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_width < constraints->min_width ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width > constraints->max_width ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support "
               "deinterlacing to size %dx%d "
               "(constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width, constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    // Allocate the hardware frames context backing the output link.
    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->input_frames->sw_format;
    ctx->output_frames->width = ctx->output_width;
    ctx->output_frames->height = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter. If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    // The VA context renders into the output pool's surfaces.
    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height, 0,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    err = deint_vaapi_build_filter_params(avctx);
    if (err < 0)
        goto fail;

    outlink->w = inlink->w;
    outlink->h = inlink->h;

    // Field-rate output doubles the frame rate, so halve the time base.
    outlink->time_base = av_mul_q(inlink->time_base,
                                  (AVRational) { 1, ctx->field_rate });
    outlink->frame_rate = av_mul_q(inlink->frame_rate,
                                   (AVRational) { ctx->field_rate, 1 });

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
359 | ||
360 | static int vaapi_proc_colour_standard(enum AVColorSpace av_cs) | |
361 | { | |
362 | switch(av_cs) { | |
363 | #define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va; | |
364 | CS(BT709, BT709); | |
365 | CS(BT470BG, BT470BG); | |
366 | CS(SMPTE170M, SMPTE170M); | |
367 | CS(SMPTE240M, SMPTE240M); | |
368 | #undef CS | |
369 | default: | |
370 | return VAProcColorStandardNone; | |
371 | } | |
372 | } | |
373 | ||
// Per-frame entry point: queue the input frame, and once enough
// reference frames are buffered, run the VA processing pipeline to emit
// one (frame rate) or two (field rate) deinterlaced output frames.
static int deint_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    DeintVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VASurfaceID backward_references[MAX_REFERENCES];
    VASurfaceID forward_references[MAX_REFERENCES];
    VAProcPipelineParameterBuffer params;
    VAProcFilterParameterBufferDeinterlacing *filter_params;
    VARectangle input_region;
    VABufferID params_id;
    VAStatus vas;
    void *filter_params_addr = NULL;
    int err, i, field, current_frame_index;

    av_log(avctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    // Maintain a sliding window of queue_depth frames: fill it up
    // first, then shift it by one per new input.  The queue owns the
    // frames; input_frame's ownership transfers here.
    if (ctx->queue_count < ctx->queue_depth) {
        ctx->frame_queue[ctx->queue_count++] = input_frame;
        if (ctx->queue_count < ctx->queue_depth) {
            // Need more reference surfaces before we can continue.
            return 0;
        }
    } else {
        av_frame_free(&ctx->frame_queue[0]);
        for (i = 0; i + 1 < ctx->queue_count; i++)
            ctx->frame_queue[i] = ctx->frame_queue[i + 1];
        ctx->frame_queue[i] = input_frame;
    }

    // The frame being deinterlaced sits just after the forward
    // references; earlier queue entries are forward refs, later ones
    // backward refs.
    current_frame_index = ctx->pipeline_caps.num_forward_references;

    input_frame = ctx->frame_queue[current_frame_index];
    // data[3] of a VAAPI AVFrame carries the VASurfaceID.
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        forward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index - i - 1]->data[3];
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        backward_references[i] = (VASurfaceID)(uintptr_t)
            ctx->frame_queue[current_frame_index + i + 1]->data[3];

    av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
           "deinterlace input.\n", input_surface);
    av_log(avctx, AV_LOG_DEBUG, "Backward references:");
    for (i = 0; i < ctx->pipeline_caps.num_backward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", backward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");
    av_log(avctx, AV_LOG_DEBUG, "Forward references:");
    for (i = 0; i < ctx->pipeline_caps.num_forward_references; i++)
        av_log(avctx, AV_LOG_DEBUG, " %#x", forward_references[i]);
    av_log(avctx, AV_LOG_DEBUG, "\n");

    // One pipeline pass per output frame: once for frame-rate output,
    // twice (one per field) for field-rate output.
    for (field = 0; field < ctx->field_rate; field++) {
        output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                           ctx->output_height);
        if (!output_frame) {
            err = AVERROR(ENOMEM);
            goto fail;
        }

        output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
        av_log(avctx, AV_LOG_DEBUG, "Using surface %#x for "
               "deinterlace output.\n", output_surface);

        memset(&params, 0, sizeof(params));

        input_region = (VARectangle) {
            .x = 0,
            .y = 0,
            .width = input_frame->width,
            .height = input_frame->height,
        };

        params.surface = input_surface;
        params.surface_region = &input_region;
        params.surface_color_standard =
            vaapi_proc_colour_standard(input_frame->colorspace);

        params.output_region = NULL;
        params.output_background_color = 0xff000000;
        params.output_color_standard = params.surface_color_standard;

        params.pipeline_flags = 0;
        params.filter_flags = VA_FRAME_PICTURE;

        // In "auto" mode, progressive frames go through the pipeline
        // with no filters attached (plain copy/scale pass).
        if (!ctx->auto_enable || input_frame->interlaced_frame) {
            vas = vaMapBuffer(ctx->hwctx->display, ctx->filter_buffer,
                              &filter_params_addr);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to map filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));
                err = AVERROR(EIO);
                goto fail;
            }
            filter_params = filter_params_addr;
            // Select which field this pass outputs, honouring the
            // input frame's field order.
            filter_params->flags = 0;
            if (input_frame->top_field_first) {
                filter_params->flags |= field ? VA_DEINTERLACING_BOTTOM_FIELD : 0;
            } else {
                filter_params->flags |= VA_DEINTERLACING_BOTTOM_FIELD_FIRST;
                filter_params->flags |= field ? 0 : VA_DEINTERLACING_BOTTOM_FIELD;
            }
            filter_params_addr = NULL;
            vas = vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
            if (vas != VA_STATUS_SUCCESS)
                av_log(avctx, AV_LOG_ERROR, "Failed to unmap filter parameter "
                       "buffer: %d (%s).\n", vas, vaErrorStr(vas));

            params.filters = &ctx->filter_buffer;
            params.num_filters = 1;

            params.forward_references = forward_references;
            params.num_forward_references =
                ctx->pipeline_caps.num_forward_references;
            params.backward_references = backward_references;
            params.num_backward_references =
                ctx->pipeline_caps.num_backward_references;

        } else {
            params.filters = NULL;
            params.num_filters = 0;
        }

        vas = vaBeginPicture(ctx->hwctx->display,
                             ctx->va_context, output_surface);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail;
        }

        vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                             VAProcPipelineParameterBufferType,
                             sizeof(params), 1, &params, &params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }
        av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
               params_id);

        vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                              &params_id, 1);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_begin;
        }

        vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            err = AVERROR(EIO);
            goto fail_after_render;
        }

        // With VAAPI >= 1, or drivers flagged with the
        // RENDER_PARAM_BUFFERS quirk, vaRenderPicture does not consume
        // the parameter buffer, so we must destroy it ourselves.
        if (HAVE_VAAPI_1 || ctx->hwctx->driver_quirks &
            AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
                // And ignore.
            }
        }

        err = av_frame_copy_props(output_frame, input_frame);
        if (err < 0)
            goto fail;

        // Field-rate output uses a doubled time base: the first field
        // keeps 2*pts, the second falls halfway to the next frame (sum
        // of the two neighbouring input pts values).
        // NOTE(review): assumes valid pts — AV_NOPTS_VALUE would
        // propagate garbage here; confirm against callers.
        if (ctx->field_rate == 2) {
            if (field == 0)
                output_frame->pts = 2 * input_frame->pts;
            else
                output_frame->pts = input_frame->pts +
                    ctx->frame_queue[current_frame_index + 1]->pts;
        }
        output_frame->interlaced_frame = 0;

        av_log(avctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
               av_get_pix_fmt_name(output_frame->format),
               output_frame->width, output_frame->height, output_frame->pts);

        err = ff_filter_frame(outlink, output_frame);
        if (err < 0)
            break;
    }

    return err;

    // Error unwinding: complete the begun VA picture before bailing out,
    // unmap any still-mapped filter buffer, and free the output frame.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    if (filter_params_addr)
        vaUnmapBuffer(ctx->hwctx->display, ctx->filter_buffer);
    av_frame_free(&output_frame);
    return err;
}
583 | ||
584 | static av_cold int deint_vaapi_init(AVFilterContext *avctx) | |
585 | { | |
586 | DeintVAAPIContext *ctx = avctx->priv; | |
587 | ||
588 | ctx->va_config = VA_INVALID_ID; | |
589 | ctx->va_context = VA_INVALID_ID; | |
590 | ctx->filter_buffer = VA_INVALID_ID; | |
591 | ctx->valid_ids = 1; | |
592 | ||
593 | return 0; | |
594 | } | |
595 | ||
596 | static av_cold void deint_vaapi_uninit(AVFilterContext *avctx) | |
597 | { | |
598 | DeintVAAPIContext *ctx = avctx->priv; | |
599 | ||
600 | if (ctx->valid_ids) | |
601 | deint_vaapi_pipeline_uninit(avctx); | |
602 | ||
603 | av_buffer_unref(&ctx->input_frames_ref); | |
604 | av_buffer_unref(&ctx->output_frames_ref); | |
605 | av_buffer_unref(&ctx->device_ref); | |
606 | } | |
607 | ||
#define OFFSET(x) offsetof(DeintVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption deint_vaapi_options[] = {
    // "mode" selects the VAProcDeinterlacing* algorithm; None means
    // "pick the best the driver offers" (resolved in
    // deint_vaapi_build_filter_params).
    { "mode", "Deinterlacing mode",
      OFFSET(mode), AV_OPT_TYPE_INT, { .i64 = VAProcDeinterlacingNone },
      VAProcDeinterlacingNone, VAProcDeinterlacingCount - 1, FLAGS, "mode" },
    { "default", "Use the highest-numbered (and therefore possibly most advanced) deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingNone }, .unit = "mode" },
    { "bob", "Use the bob deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingBob }, .unit = "mode" },
    { "weave", "Use the weave deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingWeave }, .unit = "mode" },
    { "motion_adaptive", "Use the motion adaptive deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionAdaptive }, .unit = "mode" },
    { "motion_compensated", "Use the motion compensated deinterlacing algorithm",
      0, AV_OPT_TYPE_CONST, { .i64 = VAProcDeinterlacingMotionCompensated }, .unit = "mode" },

    // "rate" chooses between frame-rate (1) and field-rate (2) output.
    { "rate", "Generate output at frame rate or field rate",
      OFFSET(field_rate), AV_OPT_TYPE_INT, { .i64 = 1 }, 1, 2, FLAGS, "rate" },
    { "frame", "Output at frame rate (one frame of output for each field-pair)",
      0, AV_OPT_TYPE_CONST, { .i64 = 1 }, .unit = "rate" },
    { "field", "Output at field rate (one frame of output for each field)",
      0, AV_OPT_TYPE_CONST, { .i64 = 2 }, .unit = "rate" },

    // When set, frames not marked interlaced bypass the deinterlacer.
    { "auto", "Only deinterlace fields, passing frames through unchanged",
      OFFSET(auto_enable), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, FLAGS },

    { NULL },
};
637 | ||
// AVOptions class exposing deint_vaapi_options on the private context.
static const AVClass deint_vaapi_class = {
    .class_name = "deinterlace_vaapi",
    .item_name  = av_default_item_name,
    .option     = deint_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};
644 | ||
// Single video input pad; frames arrive through deint_vaapi_filter_frame.
static const AVFilterPad deint_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &deint_vaapi_filter_frame,
        .config_props = &deint_vaapi_config_input,
    },
    { NULL }
};
654 | ||
// Single video output pad; the VA pipeline is built in config_props.
static const AVFilterPad deint_vaapi_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &deint_vaapi_config_output,
    },
    { NULL }
};
663 | ||
// Filter registration; HWFRAME_AWARE: the filter manages its own
// hardware frames contexts on the links.
AVFilter ff_vf_deinterlace_vaapi = {
    .name           = "deinterlace_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Deinterlacing of VAAPI surfaces"),
    .priv_size      = sizeof(DeintVAAPIContext),
    .init           = &deint_vaapi_init,
    .uninit         = &deint_vaapi_uninit,
    .query_formats  = &deint_vaapi_query_formats,
    .inputs         = deint_vaapi_inputs,
    .outputs        = deint_vaapi_outputs,
    .priv_class     = &deint_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};