/*
 * Intel MediaSDK QSV codec-independent code
 *
 * copyright (c) 2013 Luca Barbato
 * copyright (c) 2015 Anton Khirnov <anton@khirnov.net>
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
25 #include <sys/types.h>
27 #include <mfx/mfxvideo.h>
29 #include "libavutil/common.h"
30 #include "libavutil/hwcontext.h"
31 #include "libavutil/hwcontext_qsv.h"
32 #include "libavutil/mem.h"
33 #include "libavutil/log.h"
34 #include "libavutil/pixdesc.h"
35 #include "libavutil/pixfmt.h"
36 #include "libavutil/time.h"
41 #include "qsv_internal.h"
44 static int qsv_init_session(AVCodecContext
*avctx
, QSVContext
*q
, mfxSession session
,
45 AVBufferRef
*hw_frames_ref
, AVBufferRef
*hw_device_ref
)
51 } else if (hw_frames_ref
) {
52 if (q
->internal_session
) {
53 MFXClose(q
->internal_session
);
54 q
->internal_session
= NULL
;
56 av_buffer_unref(&q
->frames_ctx
.hw_frames_ctx
);
58 q
->frames_ctx
.hw_frames_ctx
= av_buffer_ref(hw_frames_ref
);
59 if (!q
->frames_ctx
.hw_frames_ctx
)
60 return AVERROR(ENOMEM
);
62 ret
= ff_qsv_init_session_frames(avctx
, &q
->internal_session
,
63 &q
->frames_ctx
, q
->load_plugins
,
64 q
->iopattern
== MFX_IOPATTERN_OUT_OPAQUE_MEMORY
);
66 av_buffer_unref(&q
->frames_ctx
.hw_frames_ctx
);
70 q
->session
= q
->internal_session
;
71 } else if (hw_device_ref
) {
72 if (q
->internal_session
) {
73 MFXClose(q
->internal_session
);
74 q
->internal_session
= NULL
;
77 ret
= ff_qsv_init_session_device(avctx
, &q
->internal_session
,
78 hw_device_ref
, q
->load_plugins
);
82 q
->session
= q
->internal_session
;
84 if (!q
->internal_session
) {
85 ret
= ff_qsv_init_internal_session(avctx
, &q
->internal_session
,
91 q
->session
= q
->internal_session
;
94 /* make sure the decoder is uninitialized */
95 MFXVideoDECODE_Close(q
->session
);
100 static int qsv_decode_init(AVCodecContext
*avctx
, QSVContext
*q
)
102 const AVPixFmtDescriptor
*desc
;
103 mfxSession session
= NULL
;
105 mfxVideoParam param
= { 0 };
106 int frame_width
= avctx
->coded_width
;
107 int frame_height
= avctx
->coded_height
;
110 desc
= av_pix_fmt_desc_get(avctx
->sw_pix_fmt
);
114 if (!q
->async_fifo
) {
115 q
->async_fifo
= av_fifo_alloc((1 + q
->async_depth
) *
116 (sizeof(mfxSyncPoint
*) + sizeof(QSVFrame
*)));
118 return AVERROR(ENOMEM
);
121 if (avctx
->pix_fmt
== AV_PIX_FMT_QSV
&& avctx
->hwaccel_context
) {
122 AVQSVContext
*user_ctx
= avctx
->hwaccel_context
;
123 session
= user_ctx
->session
;
124 iopattern
= user_ctx
->iopattern
;
125 q
->ext_buffers
= user_ctx
->ext_buffers
;
126 q
->nb_ext_buffers
= user_ctx
->nb_ext_buffers
;
129 if (avctx
->hw_frames_ctx
) {
130 AVHWFramesContext
*frames_ctx
= (AVHWFramesContext
*)avctx
->hw_frames_ctx
->data
;
131 AVQSVFramesContext
*frames_hwctx
= frames_ctx
->hwctx
;
134 if (frames_hwctx
->frame_type
& MFX_MEMTYPE_OPAQUE_FRAME
)
135 iopattern
= MFX_IOPATTERN_OUT_OPAQUE_MEMORY
;
136 else if (frames_hwctx
->frame_type
& MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET
)
137 iopattern
= MFX_IOPATTERN_OUT_VIDEO_MEMORY
;
140 frame_width
= frames_hwctx
->surfaces
[0].Info
.Width
;
141 frame_height
= frames_hwctx
->surfaces
[0].Info
.Height
;
145 iopattern
= MFX_IOPATTERN_OUT_SYSTEM_MEMORY
;
146 q
->iopattern
= iopattern
;
148 ret
= qsv_init_session(avctx
, q
, session
, avctx
->hw_frames_ctx
, avctx
->hw_device_ctx
);
150 av_log(avctx
, AV_LOG_ERROR
, "Error initializing an MFX session\n");
154 ret
= ff_qsv_codec_id_to_mfx(avctx
->codec_id
);
158 param
.mfx
.CodecId
= ret
;
159 param
.mfx
.CodecProfile
= ff_qsv_profile_to_mfx(avctx
->codec_id
, avctx
->profile
);
160 param
.mfx
.CodecLevel
= avctx
->level
== FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN
: avctx
->level
;
162 param
.mfx
.FrameInfo
.BitDepthLuma
= desc
->comp
[0].depth
;
163 param
.mfx
.FrameInfo
.BitDepthChroma
= desc
->comp
[0].depth
;
164 param
.mfx
.FrameInfo
.Shift
= desc
->comp
[0].depth
> 8;
165 param
.mfx
.FrameInfo
.FourCC
= q
->fourcc
;
166 param
.mfx
.FrameInfo
.Width
= frame_width
;
167 param
.mfx
.FrameInfo
.Height
= frame_height
;
168 param
.mfx
.FrameInfo
.ChromaFormat
= MFX_CHROMAFORMAT_YUV420
;
170 switch (avctx
->field_order
) {
171 case AV_FIELD_PROGRESSIVE
:
172 param
.mfx
.FrameInfo
.PicStruct
= MFX_PICSTRUCT_PROGRESSIVE
;
175 param
.mfx
.FrameInfo
.PicStruct
= MFX_PICSTRUCT_FIELD_TFF
;
178 param
.mfx
.FrameInfo
.PicStruct
= MFX_PICSTRUCT_FIELD_BFF
;
181 param
.mfx
.FrameInfo
.PicStruct
= MFX_PICSTRUCT_UNKNOWN
;
185 param
.IOPattern
= q
->iopattern
;
186 param
.AsyncDepth
= q
->async_depth
;
187 param
.ExtParam
= q
->ext_buffers
;
188 param
.NumExtParam
= q
->nb_ext_buffers
;
190 ret
= MFXVideoDECODE_Init(q
->session
, ¶m
);
192 return ff_qsv_print_error(avctx
, ret
,
193 "Error initializing the MFX video decoder");
195 q
->frame_info
= param
.mfx
.FrameInfo
;
200 static int alloc_frame(AVCodecContext
*avctx
, QSVContext
*q
, QSVFrame
*frame
)
204 ret
= ff_get_buffer(avctx
, frame
->frame
, AV_GET_BUFFER_FLAG_REF
);
208 if (frame
->frame
->format
== AV_PIX_FMT_QSV
) {
209 frame
->surface
= *(mfxFrameSurface1
*)frame
->frame
->data
[3];
211 frame
->surface
.Info
= q
->frame_info
;
213 frame
->surface
.Data
.PitchLow
= frame
->frame
->linesize
[0];
214 frame
->surface
.Data
.Y
= frame
->frame
->data
[0];
215 frame
->surface
.Data
.UV
= frame
->frame
->data
[1];
218 if (q
->frames_ctx
.mids
) {
219 ret
= ff_qsv_find_surface_idx(&q
->frames_ctx
, frame
);
223 frame
->surface
.Data
.MemId
= &q
->frames_ctx
.mids
[ret
];
231 static void qsv_clear_unused_frames(QSVContext
*q
)
233 QSVFrame
*cur
= q
->work_frames
;
235 if (cur
->used
&& !cur
->surface
.Data
.Locked
&& !cur
->queued
) {
237 av_frame_unref(cur
->frame
);
243 static int get_surface(AVCodecContext
*avctx
, QSVContext
*q
, mfxFrameSurface1
**surf
)
245 QSVFrame
*frame
, **last
;
248 qsv_clear_unused_frames(q
);
250 frame
= q
->work_frames
;
251 last
= &q
->work_frames
;
254 ret
= alloc_frame(avctx
, q
, frame
);
257 *surf
= &frame
->surface
;
265 frame
= av_mallocz(sizeof(*frame
));
267 return AVERROR(ENOMEM
);
268 frame
->frame
= av_frame_alloc();
271 return AVERROR(ENOMEM
);
275 ret
= alloc_frame(avctx
, q
, frame
);
279 *surf
= &frame
->surface
;
284 static QSVFrame
*find_frame(QSVContext
*q
, mfxFrameSurface1
*surf
)
286 QSVFrame
*cur
= q
->work_frames
;
288 if (surf
== &cur
->surface
)
295 static int qsv_decode(AVCodecContext
*avctx
, QSVContext
*q
,
296 AVFrame
*frame
, int *got_frame
,
300 mfxFrameSurface1
*insurf
;
301 mfxFrameSurface1
*outsurf
;
303 mfxBitstream bs
= { { { 0 } } };
307 bs
.Data
= avpkt
->data
;
308 bs
.DataLength
= avpkt
->size
;
309 bs
.MaxLength
= bs
.DataLength
;
310 bs
.TimeStamp
= avpkt
->pts
;
313 sync
= av_mallocz(sizeof(*sync
));
316 return AVERROR(ENOMEM
);
320 ret
= get_surface(avctx
, q
, &insurf
);
326 ret
= MFXVideoDECODE_DecodeFrameAsync(q
->session
, avpkt
->size ?
&bs
: NULL
,
327 insurf
, &outsurf
, sync
);
328 if (ret
== MFX_WRN_DEVICE_BUSY
)
331 } while (ret
== MFX_WRN_DEVICE_BUSY
|| ret
== MFX_ERR_MORE_SURFACE
);
333 if (ret
!= MFX_ERR_NONE
&&
334 ret
!= MFX_ERR_MORE_DATA
&&
335 ret
!= MFX_WRN_VIDEO_PARAM_CHANGED
&&
336 ret
!= MFX_ERR_MORE_SURFACE
) {
338 return ff_qsv_print_error(avctx
, ret
,
339 "Error during QSV decoding.");
342 /* make sure we do not enter an infinite loop if the SDK
343 * did not consume any data and did not return anything */
344 if (!*sync
&& !bs
.DataOffset
) {
345 bs
.DataOffset
= avpkt
->size
;
346 ++q
->zero_consume_run
;
347 if (q
->zero_consume_run
> 1)
348 ff_qsv_print_warning(avctx
, ret
, "A decode call did not consume any data");
350 q
->zero_consume_run
= 0;
354 QSVFrame
*out_frame
= find_frame(q
, outsurf
);
357 av_log(avctx
, AV_LOG_ERROR
,
358 "The returned surface does not correspond to any frame\n");
363 out_frame
->queued
= 1;
364 av_fifo_generic_write(q
->async_fifo
, &out_frame
, sizeof(out_frame
), NULL
);
365 av_fifo_generic_write(q
->async_fifo
, &sync
, sizeof(sync
), NULL
);
370 if (!av_fifo_space(q
->async_fifo
) ||
371 (!avpkt
->size
&& av_fifo_size(q
->async_fifo
))) {
374 av_fifo_generic_read(q
->async_fifo
, &out_frame
, sizeof(out_frame
), NULL
);
375 av_fifo_generic_read(q
->async_fifo
, &sync
, sizeof(sync
), NULL
);
376 out_frame
->queued
= 0;
378 if (avctx
->pix_fmt
!= AV_PIX_FMT_QSV
) {
380 ret
= MFXVideoCORE_SyncOperation(q
->session
, *sync
, 1000);
381 } while (ret
== MFX_WRN_IN_EXECUTION
);
386 src_frame
= out_frame
->frame
;
388 ret
= av_frame_ref(frame
, src_frame
);
392 outsurf
= &out_frame
->surface
;
395 FF_DISABLE_DEPRECATION_WARNINGS
396 frame
->pkt_pts
= outsurf
->Data
.TimeStamp
;
397 FF_ENABLE_DEPRECATION_WARNINGS
399 frame
->pts
= outsurf
->Data
.TimeStamp
;
402 outsurf
->Info
.PicStruct
& MFX_PICSTRUCT_FRAME_TRIPLING ?
4 :
403 outsurf
->Info
.PicStruct
& MFX_PICSTRUCT_FRAME_DOUBLING ?
2 :
404 outsurf
->Info
.PicStruct
& MFX_PICSTRUCT_FIELD_REPEATED ?
1 : 0;
405 frame
->top_field_first
=
406 outsurf
->Info
.PicStruct
& MFX_PICSTRUCT_FIELD_TFF
;
407 frame
->interlaced_frame
=
408 !(outsurf
->Info
.PicStruct
& MFX_PICSTRUCT_PROGRESSIVE
);
410 /* update the surface properties */
411 if (avctx
->pix_fmt
== AV_PIX_FMT_QSV
)
412 ((mfxFrameSurface1
*)frame
->data
[3])->Info
= outsurf
->Info
;
417 return bs
.DataOffset
;
420 int ff_qsv_decode_close(QSVContext
*q
)
422 QSVFrame
*cur
= q
->work_frames
;
425 MFXVideoDECODE_Close(q
->session
);
427 while (q
->async_fifo
&& av_fifo_size(q
->async_fifo
)) {
431 av_fifo_generic_read(q
->async_fifo
, &out_frame
, sizeof(out_frame
), NULL
);
432 av_fifo_generic_read(q
->async_fifo
, &sync
, sizeof(sync
), NULL
);
438 q
->work_frames
= cur
->next
;
439 av_frame_free(&cur
->frame
);
441 cur
= q
->work_frames
;
444 av_fifo_free(q
->async_fifo
);
445 q
->async_fifo
= NULL
;
447 av_parser_close(q
->parser
);
448 avcodec_free_context(&q
->avctx_internal
);
450 if (q
->internal_session
)
451 MFXClose(q
->internal_session
);
453 av_buffer_unref(&q
->frames_ctx
.hw_frames_ctx
);
454 av_buffer_unref(&q
->frames_ctx
.mids_buf
);
459 int ff_qsv_process_data(AVCodecContext
*avctx
, QSVContext
*q
,
460 AVFrame
*frame
, int *got_frame
, AVPacket
*pkt
)
466 if (!q
->avctx_internal
) {
467 q
->avctx_internal
= avcodec_alloc_context3(NULL
);
468 if (!q
->avctx_internal
)
469 return AVERROR(ENOMEM
);
471 if (avctx
->extradata
) {
472 q
->avctx_internal
->extradata
= av_mallocz(avctx
->extradata_size
+ AV_INPUT_BUFFER_PADDING_SIZE
);
473 if (!q
->avctx_internal
->extradata
)
474 return AVERROR(ENOMEM
);
476 memcpy(q
->avctx_internal
->extradata
, avctx
->extradata
,
477 avctx
->extradata_size
);
478 q
->avctx_internal
->extradata_size
= avctx
->extradata_size
;
481 q
->parser
= av_parser_init(avctx
->codec_id
);
483 return AVERROR(ENOMEM
);
485 q
->parser
->flags
|= PARSER_FLAG_COMPLETE_FRAMES
;
486 q
->orig_pix_fmt
= AV_PIX_FMT_NONE
;
490 return qsv_decode(avctx
, q
, frame
, got_frame
, pkt
);
492 /* we assume the packets are already split properly and want
493 * just the codec parameters here */
494 av_parser_parse2(q
->parser
, q
->avctx_internal
,
495 &dummy_data
, &dummy_size
,
496 pkt
->data
, pkt
->size
, pkt
->pts
, pkt
->dts
,
499 /* TODO: flush delayed frames on reinit */
500 if (q
->parser
->format
!= q
->orig_pix_fmt
||
501 q
->parser
->coded_width
!= avctx
->coded_width
||
502 q
->parser
->coded_height
!= avctx
->coded_height
) {
503 enum AVPixelFormat pix_fmts
[3] = { AV_PIX_FMT_QSV
,
506 enum AVPixelFormat qsv_format
;
508 qsv_format
= ff_qsv_map_pixfmt(q
->parser
->format
, &q
->fourcc
);
509 if (qsv_format
< 0) {
510 av_log(avctx
, AV_LOG_ERROR
,
511 "Decoding pixel format '%s' is not supported\n",
512 av_get_pix_fmt_name(q
->parser
->format
));
513 ret
= AVERROR(ENOSYS
);
517 q
->orig_pix_fmt
= q
->parser
->format
;
518 avctx
->pix_fmt
= pix_fmts
[1] = qsv_format
;
519 avctx
->width
= q
->parser
->width
;
520 avctx
->height
= q
->parser
->height
;
521 avctx
->coded_width
= q
->parser
->coded_width
;
522 avctx
->coded_height
= q
->parser
->coded_height
;
523 avctx
->field_order
= q
->parser
->field_order
;
524 avctx
->level
= q
->avctx_internal
->level
;
525 avctx
->profile
= q
->avctx_internal
->profile
;
527 ret
= ff_get_format(avctx
, pix_fmts
);
531 avctx
->pix_fmt
= ret
;
533 ret
= qsv_decode_init(avctx
, q
);
538 return qsv_decode(avctx
, q
, frame
, got_frame
, pkt
);
541 q
->orig_pix_fmt
= q
->parser
->format
= avctx
->pix_fmt
= AV_PIX_FMT_NONE
;
545 void ff_qsv_decode_flush(AVCodecContext
*avctx
, QSVContext
*q
)
547 q
->orig_pix_fmt
= AV_PIX_FMT_NONE
;