Commit | Line | Data |
---|---|---|
4e08c821 AK |
1 | /* |
2 | * Intel MediaSDK QSV codec-independent code | |
3 | * | |
4 | * copyright (c) 2013 Luca Barbato | |
5 | * copyright (c) 2015 Anton Khirnov <anton@khirnov.net> | |
6 | * | |
7 | * This file is part of Libav. | |
8 | * | |
9 | * Libav is free software; you can redistribute it and/or | |
10 | * modify it under the terms of the GNU Lesser General Public | |
11 | * License as published by the Free Software Foundation; either | |
12 | * version 2.1 of the License, or (at your option) any later version. | |
13 | * | |
14 | * Libav is distributed in the hope that it will be useful, | |
15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
17 | * Lesser General Public License for more details. | |
18 | * | |
19 | * You should have received a copy of the GNU Lesser General Public | |
20 | * License along with Libav; if not, write to the Free Software | |
21 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
22 | */ | |
23 | ||
24 | #include <string.h> | |
25 | #include <sys/types.h> | |
26 | ||
27 | #include <mfx/mfxvideo.h> | |
28 | ||
29 | #include "libavutil/common.h" | |
a0524d9b AK |
30 | #include "libavutil/hwcontext.h" |
31 | #include "libavutil/hwcontext_qsv.h" | |
4e08c821 AK |
32 | #include "libavutil/mem.h" |
33 | #include "libavutil/log.h" | |
92736c74 | 34 | #include "libavutil/pixdesc.h" |
4e08c821 AK |
35 | #include "libavutil/pixfmt.h" |
36 | #include "libavutil/time.h" | |
37 | ||
38 | #include "avcodec.h" | |
39 | #include "internal.h" | |
96dca089 | 40 | #include "qsv.h" |
d0a63d8b | 41 | #include "qsv_internal.h" |
b04d009b | 42 | #include "qsvdec.h" |
4e08c821 | 43 | |
a0524d9b AK |
/**
 * Select and prepare the MFX session used for decoding.
 *
 * Selection priority:
 *   1. an externally supplied session (from AVQSVContext) is used as-is;
 *   2. otherwise, if a hwframes context is given, the internal session is
 *      (re)created bound to those frames;
 *   3. otherwise a plain internal session is created lazily.
 *
 * In all cases the decoder component of the chosen session is closed so a
 * following MFXVideoDECODE_Init() starts from a clean state.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int qsv_init_session(AVCodecContext *avctx, QSVContext *q, mfxSession session,
                            AVBufferRef *hw_frames_ref)
{
    int ret;

    if (session) {
        /* caller-provided session wins unconditionally */
        q->session = session;
    } else if (hw_frames_ref) {
        /* rebuild the internal session around the new frames context;
         * tear down any previous one first */
        if (q->internal_session) {
            MFXClose(q->internal_session);
            q->internal_session = NULL;
        }
        av_buffer_unref(&q->frames_ctx.hw_frames_ctx);

        q->frames_ctx.hw_frames_ctx = av_buffer_ref(hw_frames_ref);
        if (!q->frames_ctx.hw_frames_ctx)
            return AVERROR(ENOMEM);

        ret = ff_qsv_init_session_frames(avctx, &q->internal_session,
                                         &q->frames_ctx, q->load_plugins,
                                         q->iopattern == MFX_IOPATTERN_OUT_OPAQUE_MEMORY);
        if (ret < 0) {
            /* drop our reference on failure so close() does not double-handle it */
            av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
            return ret;
        }

        q->session = q->internal_session;
    } else {
        /* no external session, no hw frames: lazily create a bare session */
        if (!q->internal_session) {
            ret = ff_qsv_init_internal_session(avctx, &q->internal_session,
                                               q->load_plugins);
            if (ret < 0)
                return ret;
        }

        q->session = q->internal_session;
    }

    /* make sure the decoder is uninitialized */
    MFXVideoDECODE_Close(q->session);

    return 0;
}
87 | ||
/**
 * Initialize (or reinitialize) the MFX decoder.
 *
 * Gathers the IO pattern, session and external buffers from the caller's
 * AVQSVContext and/or hwframes context, fills a mfxVideoParam describing the
 * coded stream (codec, profile/level, bit depth, frame geometry, field
 * order), and calls MFXVideoDECODE_Init() on the selected session.
 *
 * Called from ff_qsv_process_data() whenever the parser reports new stream
 * parameters.
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int qsv_decode_init(AVCodecContext *avctx, QSVContext *q)
{
    const AVPixFmtDescriptor *desc;
    mfxSession session = NULL;
    int iopattern = 0;
    mfxVideoParam param = { 0 };
    /* default to the parsed coded size; overridden below when a hwframes
     * context dictates the (possibly padded) surface size */
    int frame_width  = avctx->coded_width;
    int frame_height = avctx->coded_height;
    int ret;

    desc = av_pix_fmt_desc_get(avctx->sw_pix_fmt);
    if (!desc)
        return AVERROR_BUG;

    /* FIFO of in-flight (QSVFrame*, mfxSyncPoint*) pairs; sized for
     * async_depth plus one entry, created once and kept across reinits */
    if (!q->async_fifo) {
        q->async_fifo = av_fifo_alloc((1 + q->async_depth) *
                                      (sizeof(mfxSyncPoint*) + sizeof(QSVFrame*)));
        if (!q->async_fifo)
            return AVERROR(ENOMEM);
    }

    /* pick up a user-provided session and external buffers, if any */
    if (avctx->pix_fmt == AV_PIX_FMT_QSV && avctx->hwaccel_context) {
        AVQSVContext *user_ctx = avctx->hwaccel_context;
        session = user_ctx->session;
        iopattern = user_ctx->iopattern;
        q->ext_buffers = user_ctx->ext_buffers;
        q->nb_ext_buffers = user_ctx->nb_ext_buffers;
    }

    if (avctx->hw_frames_ctx) {
        AVHWFramesContext *frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        AVQSVFramesContext *frames_hwctx = frames_ctx->hwctx;

        /* derive the IO pattern from the frame pool type unless the user
         * already forced one */
        if (!iopattern) {
            if (frames_hwctx->frame_type & MFX_MEMTYPE_OPAQUE_FRAME)
                iopattern = MFX_IOPATTERN_OUT_OPAQUE_MEMORY;
            else if (frames_hwctx->frame_type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)
                iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;
        }

        /* surfaces in the pool fix the decode geometry */
        frame_width  = frames_hwctx->surfaces[0].Info.Width;
        frame_height = frames_hwctx->surfaces[0].Info.Height;
    }

    if (!iopattern)
        iopattern = MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
    q->iopattern = iopattern;

    ret = qsv_init_session(avctx, q, session, avctx->hw_frames_ctx);
    if (ret < 0) {
        av_log(avctx, AV_LOG_ERROR, "Error initializing an MFX session\n");
        return ret;
    }

    ret = ff_qsv_codec_id_to_mfx(avctx->codec_id);
    if (ret < 0)
        return ret;

    param.mfx.CodecId = ret;
    param.mfx.CodecProfile = ff_qsv_profile_to_mfx(avctx->codec_id, avctx->profile);
    param.mfx.CodecLevel = avctx->level == FF_LEVEL_UNKNOWN ? MFX_LEVEL_UNKNOWN : avctx->level;

    param.mfx.FrameInfo.BitDepthLuma   = desc->comp[0].depth;
    param.mfx.FrameInfo.BitDepthChroma = desc->comp[0].depth;
    /* >8-bit data is stored MSB-aligned in 16-bit samples */
    param.mfx.FrameInfo.Shift          = desc->comp[0].depth > 8;
    param.mfx.FrameInfo.FourCC         = q->fourcc;
    param.mfx.FrameInfo.Width          = frame_width;
    param.mfx.FrameInfo.Height         = frame_height;
    param.mfx.FrameInfo.ChromaFormat   = MFX_CHROMAFORMAT_YUV420;

    /* map libav field order onto the MFX picture-structure enum */
    switch (avctx->field_order) {
    case AV_FIELD_PROGRESSIVE:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
        break;
    case AV_FIELD_TT:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_TFF;
        break;
    case AV_FIELD_BB:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_FIELD_BFF;
        break;
    default:
        param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_UNKNOWN;
        break;
    }

    param.IOPattern   = q->iopattern;
    param.AsyncDepth  = q->async_depth;
    param.ExtParam    = q->ext_buffers;
    param.NumExtParam = q->nb_ext_buffers;

    ret = MFXVideoDECODE_Init(q->session, &param);
    if (ret < 0)
        return ff_qsv_print_error(avctx, ret,
                                  "Error initializing the MFX video decoder");

    /* remembered so system-memory surfaces can be described in alloc_frame() */
    q->frame_info = param.mfx.FrameInfo;

    return 0;
}
187 | ||
/**
 * Attach a decoded-frame buffer to a QSVFrame work-pool entry and describe
 * it as a mfxFrameSurface1 the SDK can decode into.
 *
 * For AV_PIX_FMT_QSV frames the surface descriptor is copied straight from
 * the hwaccel frame; for system-memory frames it is synthesized from
 * q->frame_info plus the AVFrame's planes (NV12 layout: Y + interleaved UV).
 *
 * @return 0 on success, a negative AVERROR code on failure.
 */
static int alloc_frame(AVCodecContext *avctx, QSVContext *q, QSVFrame *frame)
{
    int ret;

    ret = ff_get_buffer(avctx, frame->frame, AV_GET_BUFFER_FLAG_REF);
    if (ret < 0)
        return ret;

    if (frame->frame->format == AV_PIX_FMT_QSV) {
        /* hw frame: data[3] holds the mfxFrameSurface1 */
        frame->surface = *(mfxFrameSurface1*)frame->frame->data[3];
    } else {
        /* system-memory frame: describe the AVFrame planes to the SDK */
        frame->surface.Info = q->frame_info;

        frame->surface.Data.PitchLow = frame->frame->linesize[0];
        frame->surface.Data.Y = frame->frame->data[0];
        frame->surface.Data.UV = frame->frame->data[1];
    }

    /* when using an opaque/video-memory frame pool, the SDK identifies
     * surfaces by MemId rather than by data pointers */
    if (q->frames_ctx.mids) {
        ret = ff_qsv_find_surface_idx(&q->frames_ctx, frame);
        if (ret < 0)
            return ret;

        frame->surface.Data.MemId = &q->frames_ctx.mids[ret];
    }

    frame->used = 1;

    return 0;
}
218 | ||
219 | static void qsv_clear_unused_frames(QSVContext *q) | |
220 | { | |
221 | QSVFrame *cur = q->work_frames; | |
222 | while (cur) { | |
404e5147 AK |
223 | if (cur->used && !cur->surface.Data.Locked && !cur->queued) { |
224 | cur->used = 0; | |
4e08c821 AK |
225 | av_frame_unref(cur->frame); |
226 | } | |
227 | cur = cur->next; | |
228 | } | |
229 | } | |
230 | ||
231 | static int get_surface(AVCodecContext *avctx, QSVContext *q, mfxFrameSurface1 **surf) | |
232 | { | |
233 | QSVFrame *frame, **last; | |
234 | int ret; | |
235 | ||
236 | qsv_clear_unused_frames(q); | |
237 | ||
238 | frame = q->work_frames; | |
239 | last = &q->work_frames; | |
240 | while (frame) { | |
404e5147 | 241 | if (!frame->used) { |
ce320cf1 | 242 | ret = alloc_frame(avctx, q, frame); |
4e08c821 AK |
243 | if (ret < 0) |
244 | return ret; | |
404e5147 | 245 | *surf = &frame->surface; |
4e08c821 AK |
246 | return 0; |
247 | } | |
248 | ||
249 | last = &frame->next; | |
250 | frame = frame->next; | |
251 | } | |
252 | ||
253 | frame = av_mallocz(sizeof(*frame)); | |
254 | if (!frame) | |
255 | return AVERROR(ENOMEM); | |
256 | frame->frame = av_frame_alloc(); | |
257 | if (!frame->frame) { | |
258 | av_freep(&frame); | |
259 | return AVERROR(ENOMEM); | |
260 | } | |
261 | *last = frame; | |
262 | ||
ce320cf1 | 263 | ret = alloc_frame(avctx, q, frame); |
4e08c821 AK |
264 | if (ret < 0) |
265 | return ret; | |
266 | ||
404e5147 | 267 | *surf = &frame->surface; |
4e08c821 AK |
268 | |
269 | return 0; | |
270 | } | |
271 | ||
f5c4d38c | 272 | static QSVFrame *find_frame(QSVContext *q, mfxFrameSurface1 *surf) |
4e08c821 AK |
273 | { |
274 | QSVFrame *cur = q->work_frames; | |
275 | while (cur) { | |
404e5147 | 276 | if (surf == &cur->surface) |
f5c4d38c | 277 | return cur; |
4e08c821 AK |
278 | cur = cur->next; |
279 | } | |
280 | return NULL; | |
281 | } | |
282 | ||
96dca089 AK |
283 | static int qsv_decode(AVCodecContext *avctx, QSVContext *q, |
284 | AVFrame *frame, int *got_frame, | |
285 | AVPacket *avpkt) | |
4e08c821 | 286 | { |
f5c4d38c | 287 | QSVFrame *out_frame; |
4e08c821 AK |
288 | mfxFrameSurface1 *insurf; |
289 | mfxFrameSurface1 *outsurf; | |
3c53627a | 290 | mfxSyncPoint *sync; |
4e08c821 AK |
291 | mfxBitstream bs = { { { 0 } } }; |
292 | int ret; | |
293 | ||
294 | if (avpkt->size) { | |
295 | bs.Data = avpkt->data; | |
296 | bs.DataLength = avpkt->size; | |
297 | bs.MaxLength = bs.DataLength; | |
298 | bs.TimeStamp = avpkt->pts; | |
299 | } | |
300 | ||
3c53627a AK |
301 | sync = av_mallocz(sizeof(*sync)); |
302 | if (!sync) { | |
303 | av_freep(&sync); | |
304 | return AVERROR(ENOMEM); | |
305 | } | |
306 | ||
4e08c821 AK |
307 | do { |
308 | ret = get_surface(avctx, q, &insurf); | |
d32bdadd TG |
309 | if (ret < 0) { |
310 | av_freep(&sync); | |
4e08c821 | 311 | return ret; |
d32bdadd | 312 | } |
4e08c821 AK |
313 | |
314 | ret = MFXVideoDECODE_DecodeFrameAsync(q->session, avpkt->size ? &bs : NULL, | |
3c53627a | 315 | insurf, &outsurf, sync); |
4e08c821 AK |
316 | if (ret == MFX_WRN_DEVICE_BUSY) |
317 | av_usleep(1); | |
318 | ||
319 | } while (ret == MFX_WRN_DEVICE_BUSY || ret == MFX_ERR_MORE_SURFACE); | |
320 | ||
321 | if (ret != MFX_ERR_NONE && | |
322 | ret != MFX_ERR_MORE_DATA && | |
323 | ret != MFX_WRN_VIDEO_PARAM_CHANGED && | |
324 | ret != MFX_ERR_MORE_SURFACE) { | |
3c53627a | 325 | av_freep(&sync); |
95414eb2 AK |
326 | return ff_qsv_print_error(avctx, ret, |
327 | "Error during QSV decoding."); | |
4e08c821 AK |
328 | } |
329 | ||
aa9d15d8 AK |
330 | /* make sure we do not enter an infinite loop if the SDK |
331 | * did not consume any data and did not return anything */ | |
3c53627a | 332 | if (!*sync && !bs.DataOffset) { |
aa9d15d8 | 333 | bs.DataOffset = avpkt->size; |
0940b748 MT |
334 | ++q->zero_consume_run; |
335 | if (q->zero_consume_run > 1) | |
336 | ff_qsv_print_warning(avctx, ret, "A decode call did not consume any data"); | |
337 | } else { | |
338 | q->zero_consume_run = 0; | |
aa9d15d8 AK |
339 | } |
340 | ||
3c53627a | 341 | if (*sync) { |
f5c4d38c | 342 | QSVFrame *out_frame = find_frame(q, outsurf); |
4e08c821 | 343 | |
f5c4d38c | 344 | if (!out_frame) { |
4e08c821 AK |
345 | av_log(avctx, AV_LOG_ERROR, |
346 | "The returned surface does not correspond to any frame\n"); | |
3c53627a | 347 | av_freep(&sync); |
4e08c821 AK |
348 | return AVERROR_BUG; |
349 | } | |
350 | ||
f5c4d38c AK |
351 | out_frame->queued = 1; |
352 | av_fifo_generic_write(q->async_fifo, &out_frame, sizeof(out_frame), NULL); | |
353 | av_fifo_generic_write(q->async_fifo, &sync, sizeof(sync), NULL); | |
3c53627a AK |
354 | } else { |
355 | av_freep(&sync); | |
f5c4d38c AK |
356 | } |
357 | ||
358 | if (!av_fifo_space(q->async_fifo) || | |
359 | (!avpkt->size && av_fifo_size(q->async_fifo))) { | |
360 | AVFrame *src_frame; | |
361 | ||
362 | av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL); | |
363 | av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL); | |
364 | out_frame->queued = 0; | |
365 | ||
b68e3531 AK |
366 | if (avctx->pix_fmt != AV_PIX_FMT_QSV) { |
367 | do { | |
368 | ret = MFXVideoCORE_SyncOperation(q->session, *sync, 1000); | |
369 | } while (ret == MFX_WRN_IN_EXECUTION); | |
370 | } | |
f5c4d38c | 371 | |
3c53627a AK |
372 | av_freep(&sync); |
373 | ||
f5c4d38c AK |
374 | src_frame = out_frame->frame; |
375 | ||
4e08c821 AK |
376 | ret = av_frame_ref(frame, src_frame); |
377 | if (ret < 0) | |
378 | return ret; | |
379 | ||
404e5147 | 380 | outsurf = &out_frame->surface; |
f5c4d38c | 381 | |
32c83590 AK |
382 | #if FF_API_PKT_PTS |
383 | FF_DISABLE_DEPRECATION_WARNINGS | |
384 | frame->pkt_pts = outsurf->Data.TimeStamp; | |
385 | FF_ENABLE_DEPRECATION_WARNINGS | |
386 | #endif | |
387 | frame->pts = outsurf->Data.TimeStamp; | |
4e08c821 AK |
388 | |
389 | frame->repeat_pict = | |
390 | outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_TRIPLING ? 4 : | |
391 | outsurf->Info.PicStruct & MFX_PICSTRUCT_FRAME_DOUBLING ? 2 : | |
392 | outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_REPEATED ? 1 : 0; | |
393 | frame->top_field_first = | |
394 | outsurf->Info.PicStruct & MFX_PICSTRUCT_FIELD_TFF; | |
395 | frame->interlaced_frame = | |
396 | !(outsurf->Info.PicStruct & MFX_PICSTRUCT_PROGRESSIVE); | |
397 | ||
404e5147 AK |
398 | /* update the surface properties */ |
399 | if (avctx->pix_fmt == AV_PIX_FMT_QSV) | |
400 | ((mfxFrameSurface1*)frame->data[3])->Info = outsurf->Info; | |
401 | ||
4e08c821 AK |
402 | *got_frame = 1; |
403 | } | |
404 | ||
405 | return bs.DataOffset; | |
406 | } | |
407 | ||
/**
 * Tear down all decoder state held in the QSVContext.
 *
 * Order matters: the MFX decoder is closed first, then queued sync points
 * are drained from the FIFO, then the work-frame pool, the FIFO itself, the
 * internal parser/codec context, the internal MFX session and finally the
 * hwframes references are released.
 *
 * Safe to call on a partially initialized context (every step is guarded).
 *
 * @return 0 always.
 */
int ff_qsv_decode_close(QSVContext *q)
{
    QSVFrame *cur = q->work_frames;

    if (q->session)
        MFXVideoDECODE_Close(q->session);

    /* free sync points still queued for frames that were never output;
     * the QSVFrame halves of the pairs are owned by the work-frame list */
    while (q->async_fifo && av_fifo_size(q->async_fifo)) {
        QSVFrame *out_frame;
        mfxSyncPoint *sync;

        av_fifo_generic_read(q->async_fifo, &out_frame, sizeof(out_frame), NULL);
        av_fifo_generic_read(q->async_fifo, &sync, sizeof(sync), NULL);

        av_freep(&sync);
    }

    while (cur) {
        q->work_frames = cur->next;
        av_frame_free(&cur->frame);
        av_freep(&cur);
        cur = q->work_frames;
    }

    av_fifo_free(q->async_fifo);
    q->async_fifo = NULL;

    av_parser_close(q->parser);
    avcodec_free_context(&q->avctx_internal);

    if (q->internal_session)
        MFXClose(q->internal_session);

    av_buffer_unref(&q->frames_ctx.hw_frames_ctx);
    av_buffer_unref(&q->frames_ctx.mids_buf);

    return 0;
}
96dca089 AK |
446 | |
/**
 * Public entry point: parse stream parameters from the packet, (re)initialize
 * the MFX decoder when they change, and decode.
 *
 * On first call this lazily creates an internal parser and codec context
 * (seeded with the caller's extradata) used purely to extract stream
 * parameters — packets are assumed to already be properly split.
 *
 * @param got_frame set to 1 when a frame was written to @p frame
 * @return number of consumed input bytes on success, negative AVERROR code
 *         on failure.
 */
int ff_qsv_process_data(AVCodecContext *avctx, QSVContext *q,
                        AVFrame *frame, int *got_frame, AVPacket *pkt)
{
    uint8_t *dummy_data;
    int dummy_size;
    int ret;

    /* lazy one-time setup of the parameter-extraction parser */
    if (!q->avctx_internal) {
        q->avctx_internal = avcodec_alloc_context3(NULL);
        if (!q->avctx_internal)
            return AVERROR(ENOMEM);

        if (avctx->extradata) {
            q->avctx_internal->extradata = av_mallocz(avctx->extradata_size + AV_INPUT_BUFFER_PADDING_SIZE);
            if (!q->avctx_internal->extradata)
                return AVERROR(ENOMEM);

            memcpy(q->avctx_internal->extradata, avctx->extradata,
                   avctx->extradata_size);
            q->avctx_internal->extradata_size = avctx->extradata_size;
        }

        q->parser = av_parser_init(avctx->codec_id);
        if (!q->parser)
            return AVERROR(ENOMEM);

        q->parser->flags |= PARSER_FLAG_COMPLETE_FRAMES;
        /* AV_PIX_FMT_NONE forces a (re)init on the first real packet */
        q->orig_pix_fmt = AV_PIX_FMT_NONE;
    }

    /* empty packet: drain pending frames, no parsing needed */
    if (!pkt->size)
        return qsv_decode(avctx, q, frame, got_frame, pkt);

    /* we assume the packets are already split properly and want
     * just the codec parameters here */
    av_parser_parse2(q->parser, q->avctx_internal,
                     &dummy_data, &dummy_size,
                     pkt->data, pkt->size, pkt->pts, pkt->dts,
                     pkt->pos);

    /* TODO: flush delayed frames on reinit */
    if (q->parser->format       != q->orig_pix_fmt    ||
        q->parser->coded_width  != avctx->coded_width ||
        q->parser->coded_height != avctx->coded_height) {
        /* pix_fmts[1] is filled with the parsed software format below */
        enum AVPixelFormat pix_fmts[3] = { AV_PIX_FMT_QSV,
                                           AV_PIX_FMT_NONE,
                                           AV_PIX_FMT_NONE };
        enum AVPixelFormat qsv_format;

        qsv_format = ff_qsv_map_pixfmt(q->parser->format, &q->fourcc);
        if (qsv_format < 0) {
            av_log(avctx, AV_LOG_ERROR,
                   "Decoding pixel format '%s' is not supported\n",
                   av_get_pix_fmt_name(q->parser->format));
            ret = AVERROR(ENOSYS);
            goto reinit_fail;
        }

        /* propagate the parsed stream parameters to the caller's context */
        q->orig_pix_fmt     = q->parser->format;
        avctx->pix_fmt      = pix_fmts[1] = qsv_format;
        avctx->width        = q->parser->width;
        avctx->height       = q->parser->height;
        avctx->coded_width  = q->parser->coded_width;
        avctx->coded_height = q->parser->coded_height;
        avctx->field_order  = q->parser->field_order;
        avctx->level        = q->avctx_internal->level;
        avctx->profile      = q->avctx_internal->profile;

        /* let the caller pick between hw (QSV) and system-memory output */
        ret = ff_get_format(avctx, pix_fmts);
        if (ret < 0)
            goto reinit_fail;

        avctx->pix_fmt = ret;

        ret = qsv_decode_init(avctx, q);
        if (ret < 0)
            goto reinit_fail;
    }

    return qsv_decode(avctx, q, frame, got_frame, pkt);

reinit_fail:
    /* reset the format state so the next packet retries the full reinit */
    q->orig_pix_fmt = q->parser->format = avctx->pix_fmt = AV_PIX_FMT_NONE;
    return ret;
}
532 | ||
/**
 * Flush callback: forget the negotiated pixel format so the next packet
 * triggers a full decoder reinit in ff_qsv_process_data().
 */
void ff_qsv_decode_flush(AVCodecContext *avctx, QSVContext *q)
{
    q->orig_pix_fmt = AV_PIX_FMT_NONE;
}