/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <string.h>

#include <va/va.h>
#include <va/va_vpp.h>

#include "libavutil/avassert.h"
#include "libavutil/hwcontext.h"
#include "libavutil/hwcontext_vaapi.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavutil/pixdesc.h"

#include "avfilter.h"
#include "formats.h"
#include "internal.h"
#include "video.h"
typedef struct ScaleVAAPIContext {
    const AVClass *class;

    AVVAAPIDeviceContext *hwctx;
    AVBufferRef *device_ref;

    int valid_ids;
    VAConfigID  va_config;
    VAContextID va_context;

    AVBufferRef       *input_frames_ref;
    AVHWFramesContext *input_frames;

    AVBufferRef       *output_frames_ref;
    AVHWFramesContext *output_frames;

    char *output_format_string;
    enum AVPixelFormat output_format;
    int output_width, output_height;
} ScaleVAAPIContext;

static int scale_vaapi_query_formats(AVFilterContext *avctx)
{
    enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
    };

    ff_formats_ref(ff_make_format_list(pix_fmts),
                   &avctx->inputs[0]->out_formats);
    ff_formats_ref(ff_make_format_list(pix_fmts),
                   &avctx->outputs[0]->in_formats);

    return 0;
}
static int scale_vaapi_pipeline_uninit(ScaleVAAPIContext *ctx)
{
    if (ctx->va_context != VA_INVALID_ID) {
        vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        ctx->va_context = VA_INVALID_ID;
    }

    if (ctx->va_config != VA_INVALID_ID) {
        vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        ctx->va_config = VA_INVALID_ID;
    }

    av_buffer_unref(&ctx->output_frames_ref);
    av_buffer_unref(&ctx->device_ref);

    return 0;
}
static int scale_vaapi_config_input(AVFilterLink *inlink)
{
    AVFilterContext *avctx = inlink->dst;
    ScaleVAAPIContext *ctx = avctx->priv;

    scale_vaapi_pipeline_uninit(ctx);

    if (!inlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the processing device.\n");
        return AVERROR(EINVAL);
    }

    ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx);
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    return 0;
}
static int scale_vaapi_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    ScaleVAAPIContext *ctx = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err, i;

    scale_vaapi_pipeline_uninit(ctx);

    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, 0, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(ctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    ctx->output_frames_ref = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!ctx->output_frames_ref) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    ctx->output_frames = (AVHWFramesContext*)ctx->output_frames_ref->data;

    ctx->output_frames->format    = AV_PIX_FMT_VAAPI;
    ctx->output_frames->sw_format = ctx->output_format;
    ctx->output_frames->width     = ctx->output_width;
    ctx->output_frames->height    = ctx->output_height;

    // The number of output frames we need is determined by what follows
    // the filter.  If it's an encoder with complex frame reference
    // structures then this could be very high.
    ctx->output_frames->initial_pool_size = 10;

    err = av_hwframe_ctx_init(ctx->output_frames_ref);
    if (err < 0) {
        av_log(ctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = ctx->output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height,
                          VA_PROGRESSIVE,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    outlink->hw_frames_ctx = av_buffer_ref(ctx->output_frames_ref);
    if (!outlink->hw_frames_ctx) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&ctx->output_frames_ref);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
static int vaapi_proc_colour_standard(enum AVColorSpace av_cs)
{
    switch(av_cs) {
// Each CS() entry expands to e.g.
// "case AVCOL_SPC_SMPTE170M: return VAProcColorStandardSMPTE170M;".
#define CS(av, va) case AVCOL_SPC_ ## av: return VAProcColorStandard ## va;
        CS(BT709,     BT709);
        CS(BT470BG,   BT601);
        CS(SMPTE170M, SMPTE170M);
        CS(SMPTE240M, SMPTE240M);
#undef CS
    default:
        return VAProcColorStandardNone;
    }
}
static int scale_vaapi_filter_frame(AVFilterLink *inlink, AVFrame *input_frame)
{
    AVFilterContext *avctx = inlink->dst;
    AVFilterLink *outlink = avctx->outputs[0];
    ScaleVAAPIContext *ctx = avctx->priv;
    AVFrame *output_frame = NULL;
    VASurfaceID input_surface, output_surface;
    VAProcPipelineParameterBuffer params;
    VABufferID params_id;
    VARectangle input_region;
    VAStatus vas;
    int err;

    av_log(ctx, AV_LOG_DEBUG, "Filter input: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(input_frame->format),
           input_frame->width, input_frame->height, input_frame->pts);

    if (ctx->va_context == VA_INVALID_ID)
        return AVERROR(EINVAL);

    // For AV_PIX_FMT_VAAPI frames, data[3] carries the VASurfaceID.
    input_surface = (VASurfaceID)(uintptr_t)input_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale input.\n",
           input_surface);

    output_frame = ff_get_video_buffer(outlink, ctx->output_width,
                                       ctx->output_height);
    if (!output_frame) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    output_surface = (VASurfaceID)(uintptr_t)output_frame->data[3];
    av_log(ctx, AV_LOG_DEBUG, "Using surface %#x for scale output.\n",
           output_surface);

    memset(&params, 0, sizeof(params));

    // If there were top/left cropping, it could be taken into
    // account here.
    input_region = (VARectangle) {
        .x      = 0,
        .y      = 0,
        .width  = input_frame->width,
        .height = input_frame->height,
    };

    params.surface = input_surface;
    params.surface_region = &input_region;
    params.surface_color_standard =
        vaapi_proc_colour_standard(input_frame->colorspace);

    params.output_region = 0;
    params.output_background_color = 0xff000000;
    params.output_color_standard = params.surface_color_standard;

    params.pipeline_flags = 0;
    params.filter_flags = VA_FILTER_SCALING_HQ;

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, output_surface);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(params), 1, &params, &params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }
    av_log(ctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n",
           params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context,
                          &params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(ctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (HAVE_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        vas = vaDestroyBuffer(ctx->hwctx->display, params_id);
        if (vas != VA_STATUS_SUCCESS) {
            av_log(ctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                   "%d (%s).\n", vas, vaErrorStr(vas));
            // And ignore.
        }
    }

    av_frame_copy_props(output_frame, input_frame);
    av_frame_free(&input_frame);

    av_log(ctx, AV_LOG_DEBUG, "Filter output: %s, %ux%u (%"PRId64").\n",
           av_get_pix_fmt_name(output_frame->format),
           output_frame->width, output_frame->height, output_frame->pts);

    return ff_filter_frame(outlink, output_frame);

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_id, 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_frame_free(&input_frame);
    av_frame_free(&output_frame);
    return err;
}
static av_cold int scale_vaapi_init(AVFilterContext *avctx)
{
    ScaleVAAPIContext *ctx = avctx->priv;

    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;
    ctx->valid_ids  = 1;

    if (ctx->output_format_string) {
        ctx->output_format = av_get_pix_fmt(ctx->output_format_string);
        if (ctx->output_format == AV_PIX_FMT_NONE) {
            av_log(ctx, AV_LOG_ERROR, "Invalid output format.\n");
            return AVERROR(EINVAL);
        }
    } else {
        // Use the input format once that is configured.
        ctx->output_format = AV_PIX_FMT_NONE;
    }

    return 0;
}
static av_cold void scale_vaapi_uninit(AVFilterContext *avctx)
{
    ScaleVAAPIContext *ctx = avctx->priv;

    if (ctx->valid_ids)
        scale_vaapi_pipeline_uninit(ctx);

    av_buffer_unref(&ctx->input_frames_ref);
    av_buffer_unref(&ctx->output_frames_ref);
    av_buffer_unref(&ctx->device_ref);
}

#define OFFSET(x) offsetof(ScaleVAAPIContext, x)
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM)
static const AVOption scale_vaapi_options[] = {
    { "w", "Output video width",
      OFFSET(output_width),  AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "h", "Output video height",
      OFFSET(output_height), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, INT_MAX, .flags = FLAGS },
    { "format", "Output video format (software format of hardware frames)",
      OFFSET(output_format_string), AV_OPT_TYPE_STRING, .flags = FLAGS },
    { NULL },
};
static const AVClass scale_vaapi_class = {
    .class_name = "scale_vaapi",
    .item_name  = av_default_item_name,
    .option     = scale_vaapi_options,
    .version    = LIBAVUTIL_VERSION_INT,
};
static const AVFilterPad scale_vaapi_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = &scale_vaapi_filter_frame,
        .config_props = &scale_vaapi_config_input,
    },
    { NULL }
};
static const AVFilterPad scale_vaapi_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = &scale_vaapi_config_output,
    },
    { NULL }
};
AVFilter ff_vf_scale_vaapi = {
    .name           = "scale_vaapi",
    .description    = NULL_IF_CONFIG_SMALL("Scale to/from VAAPI surfaces."),
    .priv_size      = sizeof(ScaleVAAPIContext),
    .init           = &scale_vaapi_init,
    .uninit         = &scale_vaapi_uninit,
    .query_formats  = &scale_vaapi_query_formats,
    .inputs         = scale_vaapi_inputs,
    .outputs        = scale_vaapi_outputs,
    .priv_class     = &scale_vaapi_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};