libavfilter/vf_overlay_qsv.c
/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * A hardware accelerated overlay filter based on Intel Quick Sync Video VPP
 */
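
/*
 * Illustrative filtergraph usage (a sketch only; option names and expression
 * variables are the ones declared in the options[] and var_names[] tables
 * below, the surrounding command line depends on the caller):
 *
 *     overlay_qsv=x=main_w-overlay_iw:y=0:alpha=128:eof_action=endall
 *
 * x, y, w and h are evaluated as expressions, so they may reference the main
 * and overlay input dimensions (main_w/W, main_h/H, overlay_iw, overlay_ih)
 * as well as each other.
 */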

#include "libavutil/opt.h"
#include "libavutil/common.h"
#include "libavutil/pixdesc.h"
#include "libavutil/eval.h"
#include "libavutil/hwcontext.h"
#include "libavutil/avstring.h"
#include "libavutil/avassert.h"
#include "libavutil/imgutils.h"
#include "libavutil/mathematics.h"

#include "internal.h"
#include "avfilter.h"
#include "formats.h"
#include "video.h"

#include "qsvvpp.h"

#define MAIN    0
#define OVERLAY 1

#define OFFSET(x) offsetof(QSVOverlayContext, x)
#define FLAGS AV_OPT_FLAG_VIDEO_PARAM

enum var_name {
    VAR_MAIN_iW,    VAR_MW,
    VAR_MAIN_iH,    VAR_MH,
    VAR_OVERLAY_iW,
    VAR_OVERLAY_iH,
    VAR_OVERLAY_X,  VAR_OX,
    VAR_OVERLAY_Y,  VAR_OY,
    VAR_OVERLAY_W,  VAR_OW,
    VAR_OVERLAY_H,  VAR_OH,
    VAR_VARS_NB
};

enum EOFAction {
    EOF_ACTION_REPEAT,
    EOF_ACTION_ENDALL
};

typedef struct QSVOverlayContext {
    const AVClass      *class;

    QSVVPPContext      *qsv;
    QSVVPPParam        qsv_param;
    mfxExtVPPComposite comp_conf;
    double             var_values[VAR_VARS_NB];

    char     *overlay_ox, *overlay_oy, *overlay_ow, *overlay_oh;
    uint16_t  overlay_alpha, overlay_pixel_alpha;

    enum EOFAction eof_action;  /* action to take on EOF from source */

    AVFrame *main;
    AVFrame *over_prev, *over_next;
} QSVOverlayContext;

static const char *const var_names[] = {
    "main_w",     "W",  /* input width of the main layer */
    "main_h",     "H",  /* input height of the main layer */
    "overlay_iw",       /* input width of the overlay layer */
    "overlay_ih",       /* input height of the overlay layer */
    "overlay_x",  "x",  /* x position of the overlay layer inside of main */
    "overlay_y",  "y",  /* y position of the overlay layer inside of main */
    "overlay_w",  "w",  /* output width of overlay layer */
    "overlay_h",  "h",  /* output height of overlay layer */
    NULL
};
static const AVOption options[] = {
    { "x", "Overlay x position", OFFSET(overlay_ox), AV_OPT_TYPE_STRING, { .str = "0" }, 0, 255, .flags = FLAGS },
    { "y", "Overlay y position", OFFSET(overlay_oy), AV_OPT_TYPE_STRING, { .str = "0" }, 0, 255, .flags = FLAGS },
    { "w", "Overlay width",      OFFSET(overlay_ow), AV_OPT_TYPE_STRING, { .str = "overlay_iw" }, 0, 255, .flags = FLAGS },
    { "h", "Overlay height",     OFFSET(overlay_oh), AV_OPT_TYPE_STRING, { .str = "overlay_ih*w/overlay_iw" }, 0, 255, .flags = FLAGS },
    { "alpha", "Overlay global alpha", OFFSET(overlay_alpha), AV_OPT_TYPE_INT, { .i64 = 255 }, 0, 255, .flags = FLAGS },
    { "eof_action", "Action to take when encountering EOF from secondary input",
      OFFSET(eof_action), AV_OPT_TYPE_INT, { .i64 = EOF_ACTION_REPEAT },
      EOF_ACTION_REPEAT, EOF_ACTION_ENDALL, .flags = FLAGS, "eof_action" },
    { "repeat", "Repeat the previous frame.", 0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_REPEAT }, .flags = FLAGS, "eof_action" },
    { "endall", "End both streams.",          0, AV_OPT_TYPE_CONST, { .i64 = EOF_ACTION_ENDALL }, .flags = FLAGS, "eof_action" },
    { NULL }
};

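/*
 * Parse and evaluate the x/y/w/h option expressions into var_values[].
 * Each value is evaluated more than once so that the expressions may refer
 * to one another (e.g. w in terms of h, or x in terms of y).
 */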
static int eval_expr(AVFilterContext *ctx)
{
    QSVOverlayContext *vpp = ctx->priv;
    double     *var_values = vpp->var_values;
    int                ret = 0;
    AVExpr *ox_expr = NULL, *oy_expr = NULL;
    AVExpr *ow_expr = NULL, *oh_expr = NULL;

#define PASS_EXPR(e, s) {\
    ret = av_expr_parse(&e, s, var_names, NULL, NULL, NULL, NULL, 0, ctx); \
    if (ret < 0) {\
        av_log(ctx, AV_LOG_ERROR, "Error when parsing '%s'.\n", s);\
        goto release;\
    }\
}
    PASS_EXPR(ox_expr, vpp->overlay_ox);
    PASS_EXPR(oy_expr, vpp->overlay_oy);
    PASS_EXPR(ow_expr, vpp->overlay_ow);
    PASS_EXPR(oh_expr, vpp->overlay_oh);
#undef PASS_EXPR

    var_values[VAR_OVERLAY_W] =
    var_values[VAR_OW]        = av_expr_eval(ow_expr, var_values, NULL);
    var_values[VAR_OVERLAY_H] =
    var_values[VAR_OH]        = av_expr_eval(oh_expr, var_values, NULL);

    /* calc again in case ow is relative to oh */
    var_values[VAR_OVERLAY_W] =
    var_values[VAR_OW]        = av_expr_eval(ow_expr, var_values, NULL);

    var_values[VAR_OVERLAY_X] =
    var_values[VAR_OX]        = av_expr_eval(ox_expr, var_values, NULL);
    var_values[VAR_OVERLAY_Y] =
    var_values[VAR_OY]        = av_expr_eval(oy_expr, var_values, NULL);

    /* calc again in case ox is relative to oy */
    var_values[VAR_OVERLAY_X] =
    var_values[VAR_OX]        = av_expr_eval(ox_expr, var_values, NULL);

    /* calc overlay_w and overlay_h again in case they are relative to ox/oy */
    var_values[VAR_OVERLAY_W] =
    var_values[VAR_OW]        = av_expr_eval(ow_expr, var_values, NULL);
    var_values[VAR_OVERLAY_H] =
    var_values[VAR_OH]        = av_expr_eval(oh_expr, var_values, NULL);
    var_values[VAR_OVERLAY_W] =
    var_values[VAR_OW]        = av_expr_eval(ow_expr, var_values, NULL);

release:
    av_expr_free(ox_expr);
    av_expr_free(oy_expr);
    av_expr_free(ow_expr);
    av_expr_free(oh_expr);

    return ret;
}

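/*
 * Report whether the overlay input carries a per-pixel alpha channel. For
 * AV_PIX_FMT_QSV links the underlying software format is taken from the
 * hardware frames context; otherwise the link format itself is examined.
 */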
static int have_alpha_planar(AVFilterLink *link)
{
    enum AVPixelFormat pix_fmt = link->format;
    const AVPixFmtDescriptor *desc;
    AVHWFramesContext *fctx;

    if (link->format == AV_PIX_FMT_QSV) {
        fctx    = (AVHWFramesContext *)link->hw_frames_ctx->data;
        pix_fmt = fctx->sw_format;
    }

    desc = av_pix_fmt_desc_get(pix_fmt);
    if (!desc)
        return 0;

    return !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
}

static int config_main_input(AVFilterLink *inlink)
{
    AVFilterContext       *ctx = inlink->dst;
    QSVOverlayContext     *vpp = ctx->priv;
    mfxVPPCompInputStream *st  = &vpp->comp_conf.InputStream[0];

    av_log(ctx, AV_LOG_DEBUG, "Input[%d] is of %s.\n", FF_INLINK_IDX(inlink),
           av_get_pix_fmt_name(inlink->format));

    vpp->var_values[VAR_MAIN_iW] =
    vpp->var_values[VAR_MW]      = inlink->w;
    vpp->var_values[VAR_MAIN_iH] =
    vpp->var_values[VAR_MH]      = inlink->h;

    st->DstX              = 0;
    st->DstY              = 0;
    st->DstW              = inlink->w;
    st->DstH              = inlink->h;
    st->GlobalAlphaEnable = 0;
    st->PixelAlphaEnable  = 0;

    return 0;
}

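/*
 * Configure the overlay input: evaluate the position/size expressions and
 * fill the second mfxVPPCompInputStream accordingly. Global alpha is only
 * enabled when the requested value is below 255, and per-pixel alpha when
 * the overlay format actually has an alpha channel.
 */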
static int config_overlay_input(AVFilterLink *inlink)
{
    AVFilterContext       *ctx = inlink->dst;
    QSVOverlayContext     *vpp = ctx->priv;
    mfxVPPCompInputStream *st  = &vpp->comp_conf.InputStream[1];
    int                    ret = 0;

    av_log(ctx, AV_LOG_DEBUG, "Input[%d] is of %s.\n", FF_INLINK_IDX(inlink),
           av_get_pix_fmt_name(inlink->format));

    vpp->var_values[VAR_OVERLAY_iW] = inlink->w;
    vpp->var_values[VAR_OVERLAY_iH] = inlink->h;

    ret = eval_expr(ctx);
    if (ret < 0)
        return ret;

    st->DstX              = vpp->var_values[VAR_OX];
    st->DstY              = vpp->var_values[VAR_OY];
    st->DstW              = vpp->var_values[VAR_OW];
    st->DstH              = vpp->var_values[VAR_OH];
    st->GlobalAlpha       = vpp->overlay_alpha;
    st->GlobalAlphaEnable = (st->GlobalAlpha < 255);
    st->PixelAlphaEnable  = have_alpha_planar(inlink);

    return 0;
}

static int config_output(AVFilterLink *outlink)
{
    AVFilterContext   *ctx = outlink->src;
    QSVOverlayContext *vpp = ctx->priv;
    AVFilterLink      *in0 = ctx->inputs[0];
    AVFilterLink      *in1 = ctx->inputs[1];

    av_log(ctx, AV_LOG_DEBUG, "Output is of %s.\n", av_get_pix_fmt_name(outlink->format));
    if ((in0->format == AV_PIX_FMT_QSV && in1->format != AV_PIX_FMT_QSV) ||
        (in0->format != AV_PIX_FMT_QSV && in1->format == AV_PIX_FMT_QSV)) {
        av_log(ctx, AV_LOG_ERROR, "Mixing hardware and software pixel formats is not supported.\n");
        return AVERROR(EINVAL);
    } else if (in0->format == AV_PIX_FMT_QSV) {
        AVHWFramesContext *hw_frame0 = (AVHWFramesContext *)in0->hw_frames_ctx->data;
        AVHWFramesContext *hw_frame1 = (AVHWFramesContext *)in1->hw_frames_ctx->data;

        if (hw_frame0->device_ctx != hw_frame1->device_ctx) {
            av_log(ctx, AV_LOG_ERROR, "Inputs with different underlying QSV devices are forbidden.\n");
            return AVERROR(EINVAL);
        }
    }

    outlink->w          = vpp->var_values[VAR_MW];
    outlink->h          = vpp->var_values[VAR_MH];
    outlink->frame_rate = in0->frame_rate;
    outlink->time_base  = av_inv_q(outlink->frame_rate);

    return ff_qsvvpp_create(ctx, &vpp->qsv, &vpp->qsv_param);
}

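/*
 * Submit one main frame and one overlay frame to the QSV VPP session. The
 * main frame is handed over directly; the overlay frame is cloned so that it
 * can be re-submitted later if the overlay input hits EOF first.
 */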
static int blend_frame(AVFilterContext *ctx, AVFrame *mpic, AVFrame *opic)
{
    int                ret = 0;
    QSVOverlayContext *vpp = ctx->priv;
    AVFrame     *opic_copy = NULL;

    ret = ff_qsvvpp_filter_frame(vpp->qsv, ctx->inputs[0], mpic);
    if (ret == 0 || ret == AVERROR(EAGAIN)) {
        /* Work on a clone of the overlay frame, because:
         * 1. ff_qsvvpp_filter_frame() takes control of the frame it is given;
         * 2. the overlay frame must be repeated when the 2nd input reaches EOF.
         */
        opic_copy = av_frame_clone(opic);
        if (!opic_copy)
            return AVERROR(ENOMEM);

        ret = ff_qsvvpp_filter_frame(vpp->qsv, ctx->inputs[1], opic_copy);
    }

    return ret;
}

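/*
 * Called when the overlay input signals EOF: either keep blending the main
 * input with the last overlay frame ('repeat') or terminate both streams
 * ('endall'), as selected by the eof_action option.
 */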
static int handle_overlay_eof(AVFilterContext *ctx)
{
    int                ret = 0;
    QSVOverlayContext *s   = ctx->priv;

    /* Repeat previous frame on secondary input */
    if (s->over_prev && s->eof_action == EOF_ACTION_REPEAT)
        ret = blend_frame(ctx, s->main, s->over_prev);
    /* End both streams */
    else if (s->eof_action == EOF_ACTION_ENDALL)
        return AVERROR_EOF;

    s->main = NULL;

    return ret;
}

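/*
 * Drive the filter from the output side: fetch a main frame and an overlay
 * frame, drop overlay frames that lag behind the main timestamp, then blend
 * the main frame with the best matching overlay frame.
 */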
static int request_frame(AVFilterLink *outlink)
{
    AVFilterContext   *ctx     = outlink->src;
    QSVOverlayContext *s       = ctx->priv;
    AVRational         tb_main = ctx->inputs[MAIN]->time_base;
    AVRational         tb_over = ctx->inputs[OVERLAY]->time_base;
    int                ret     = 0;

    /* get a frame on the main input */
    if (!s->main) {
        ret = ff_request_frame(ctx->inputs[MAIN]);
        if (ret < 0)
            return ret;
    }

    /* get a new frame on the overlay input; on EOF, consult the 'eof_action' setting */
    if (!s->over_next) {
        ret = ff_request_frame(ctx->inputs[OVERLAY]);
        if (ret == AVERROR_EOF)
            return handle_overlay_eof(ctx);
        else if (ret < 0)
            return ret;
    }

    while (s->main->pts != AV_NOPTS_VALUE &&
           s->over_next->pts != AV_NOPTS_VALUE &&
           av_compare_ts(s->over_next->pts, tb_over, s->main->pts, tb_main) < 0) {
        av_frame_free(&s->over_prev);
        FFSWAP(AVFrame*, s->over_prev, s->over_next);

        ret = ff_request_frame(ctx->inputs[OVERLAY]);
        if (ret == AVERROR_EOF)
            return handle_overlay_eof(ctx);
        else if (ret < 0)
            return ret;
    }

    if (s->main->pts == AV_NOPTS_VALUE ||
        s->over_next->pts == AV_NOPTS_VALUE ||
        !av_compare_ts(s->over_next->pts, tb_over, s->main->pts, tb_main)) {
        ret = blend_frame(ctx, s->main, s->over_next);
        av_frame_free(&s->over_prev);
        FFSWAP(AVFrame*, s->over_prev, s->over_next);
    } else if (s->over_prev) {
        ret = blend_frame(ctx, s->main, s->over_prev);
    } else {
        av_frame_free(&s->main);
        ret = AVERROR(EAGAIN);
    }

    s->main = NULL;

    return ret;
}

static int filter_frame_main(AVFilterLink *inlink, AVFrame *frame)
{
    QSVOverlayContext *s = inlink->dst->priv;

    av_assert0(!s->main);
    s->main = frame;

    return 0;
}

static int filter_frame_overlay(AVFilterLink *inlink, AVFrame *frame)
{
    QSVOverlayContext *s = inlink->dst->priv;

    av_assert0(!s->over_next);
    s->over_next = frame;

    return 0;
}

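/*
 * Initialize the composition configuration (mfxExtVPPComposite) with one
 * input stream per filter input and register it as an extra buffer for the
 * shared QSV VPP helper.
 */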
static int overlay_qsv_init(AVFilterContext *ctx)
{
    QSVOverlayContext *vpp = ctx->priv;

    /* fill composite config */
    vpp->comp_conf.Header.BufferId = MFX_EXTBUFF_VPP_COMPOSITE;
    vpp->comp_conf.Header.BufferSz = sizeof(vpp->comp_conf);
    vpp->comp_conf.NumInputStream  = ctx->nb_inputs;
    vpp->comp_conf.InputStream     = av_mallocz_array(ctx->nb_inputs,
                                                      sizeof(*vpp->comp_conf.InputStream));
    if (!vpp->comp_conf.InputStream)
        return AVERROR(ENOMEM);

    /* initialize QSVVPP params */
    vpp->qsv_param.filter_frame = NULL;
    vpp->qsv_param.ext_buf      = av_mallocz(sizeof(*vpp->qsv_param.ext_buf));
    if (!vpp->qsv_param.ext_buf)
        return AVERROR(ENOMEM);

    vpp->qsv_param.ext_buf[0]    = (mfxExtBuffer *)&vpp->comp_conf;
    vpp->qsv_param.num_ext_buf   = 1;
    vpp->qsv_param.out_sw_format = AV_PIX_FMT_NV12;
    vpp->qsv_param.num_crop      = 0;

    return 0;
}

static void overlay_qsv_uninit(AVFilterContext *ctx)
{
    QSVOverlayContext *vpp = ctx->priv;

    av_frame_free(&vpp->main);
    av_frame_free(&vpp->over_prev);
    av_frame_free(&vpp->over_next);
    ff_qsvvpp_free(&vpp->qsv);
    av_freep(&vpp->comp_conf.InputStream);
    av_freep(&vpp->qsv_param.ext_buf);
}

static int overlay_qsv_query_formats(AVFilterContext *ctx)
{
    int i;

    static const enum AVPixelFormat main_in_fmts[] = {
        AV_PIX_FMT_YUV420P,
        AV_PIX_FMT_NV12,
        AV_PIX_FMT_YUYV422,
        AV_PIX_FMT_RGB32,
        AV_PIX_FMT_QSV,
        AV_PIX_FMT_NONE
    };
    static const enum AVPixelFormat out_pix_fmts[] = {
        AV_PIX_FMT_NV12,
        AV_PIX_FMT_QSV,
        AV_PIX_FMT_NONE
    };

    for (i = 0; i < ctx->nb_inputs; i++)
        ff_formats_ref(ff_make_format_list(main_in_fmts), &ctx->inputs[i]->out_formats);

    ff_formats_ref(ff_make_format_list(out_pix_fmts), &ctx->outputs[0]->in_formats);

    return 0;
}

static const AVClass overlay_qsv_class = {
    .class_name = "overlay_qsv",
    .item_name  = av_default_item_name,
    .option     = options,
    .version    = LIBAVUTIL_VERSION_INT,
};

static const AVFilterPad overlay_qsv_inputs[] = {
    {
        .name         = "main",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame_main,
        .config_props = config_main_input,
        .needs_fifo   = 1,
    },
    {
        .name         = "overlay",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame_overlay,
        .config_props = config_overlay_input,
        .needs_fifo   = 1,
    },
    { NULL }
};

static const AVFilterPad overlay_qsv_outputs[] = {
    {
        .name          = "default",
        .type          = AVMEDIA_TYPE_VIDEO,
        .config_props  = config_output,
        .request_frame = request_frame,
    },
    { NULL }
};

AVFilter ff_vf_overlay_qsv = {
    .name           = "overlay_qsv",
    .description    = NULL_IF_CONFIG_SMALL("Quick Sync Video overlay."),
    .priv_size      = sizeof(QSVOverlayContext),
    .query_formats  = overlay_qsv_query_formats,
    .init           = overlay_qsv_init,
    .uninit         = overlay_qsv_uninit,
    .inputs         = overlay_qsv_inputs,
    .outputs        = overlay_qsv_outputs,
    .priv_class     = &overlay_qsv_class,
    .flags_internal = FF_FILTER_FLAG_HWFRAME_AWARE,
};