/*
 * Copyright (c) 2015 Anton Khirnov
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */

/**
 * @file
 * Intel QSV-accelerated H.264 decoding example.
 *
 * @example qsvdec.c
 * This example shows how to do QSV-accelerated H.264 decoding with output
 * frames in VA-API video surfaces.
 */

#include "config.h"

#include <stdio.h>

#include <mfx/mfxvideo.h>

#include <va/va.h>
#include <va/va_x11.h>
#include <X11/Xlib.h>

#include "libavformat/avformat.h"
#include "libavformat/avio.h"

#include "libavcodec/avcodec.h"
#include "libavcodec/qsv.h"

#include "libavutil/error.h"
#include "libavutil/mem.h"
typedef struct DecodeContext {
    mfxSession mfx_session;
    VADisplay va_dpy;

    VASurfaceID *surfaces;
    mfxMemId *surface_ids;
    int *surface_used;
    int nb_surfaces;

    mfxFrameInfo frame_info;
} DecodeContext;

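/* libmfx frame allocator callback: back the decoder's surface request with
 * VA-API surfaces created on our VA display. Only a single allocation
 * request for 8-bit NV12 4:2:0 video-memory decoder targets is supported. */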
static mfxStatus frame_alloc(mfxHDL pthis, mfxFrameAllocRequest *req,
                             mfxFrameAllocResponse *resp)
{
    DecodeContext *decode = pthis;
    int err, i;

    if (decode->surfaces) {
        fprintf(stderr, "Multiple allocation requests.\n");
        return MFX_ERR_MEMORY_ALLOC;
    }
    if (!(req->Type & MFX_MEMTYPE_VIDEO_MEMORY_DECODER_TARGET)) {
        fprintf(stderr, "Unsupported surface type: %d\n", req->Type);
        return MFX_ERR_UNSUPPORTED;
    }
    if (req->Info.BitDepthLuma != 8 || req->Info.BitDepthChroma != 8 ||
        req->Info.Shift || req->Info.FourCC != MFX_FOURCC_NV12 ||
        req->Info.ChromaFormat != MFX_CHROMAFORMAT_YUV420) {
        fprintf(stderr, "Unsupported surface properties.\n");
        return MFX_ERR_UNSUPPORTED;
    }

    decode->surfaces     = av_malloc_array (req->NumFrameSuggested, sizeof(*decode->surfaces));
    decode->surface_ids  = av_malloc_array (req->NumFrameSuggested, sizeof(*decode->surface_ids));
    decode->surface_used = av_mallocz_array(req->NumFrameSuggested, sizeof(*decode->surface_used));
    if (!decode->surfaces || !decode->surface_ids || !decode->surface_used)
        goto fail;

    err = vaCreateSurfaces(decode->va_dpy, VA_RT_FORMAT_YUV420,
                           req->Info.Width, req->Info.Height,
                           decode->surfaces, req->NumFrameSuggested,
                           NULL, 0);
    if (err != VA_STATUS_SUCCESS) {
        fprintf(stderr, "Error allocating VA surfaces\n");
        goto fail;
    }
    decode->nb_surfaces = req->NumFrameSuggested;

    for (i = 0; i < decode->nb_surfaces; i++)
        decode->surface_ids[i] = &decode->surfaces[i];

    resp->mids           = decode->surface_ids;
    resp->NumFrameActual = decode->nb_surfaces;

    decode->frame_info = req->Info;

    return MFX_ERR_NONE;
fail:
    av_freep(&decode->surfaces);
    av_freep(&decode->surface_ids);
    av_freep(&decode->surface_used);

    return MFX_ERR_MEMORY_ALLOC;
}

static mfxStatus frame_free(mfxHDL pthis, mfxFrameAllocResponse *resp)
{
    DecodeContext *decode = pthis;

    if (decode->surfaces)
        vaDestroySurfaces(decode->va_dpy, decode->surfaces, decode->nb_surfaces);
    av_freep(&decode->surfaces);
    av_freep(&decode->surface_ids);
    av_freep(&decode->surface_used);
    decode->nb_surfaces = 0;

    return MFX_ERR_NONE;
}

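/* Locking/unlocking (i.e. mapping surfaces to system memory) is not
 * implemented: decoded frames stay in video memory and are read back
 * explicitly with vaGetImage() in decode_packet(). */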
static mfxStatus frame_lock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_unlock(mfxHDL pthis, mfxMemId mid, mfxFrameData *ptr)
{
    return MFX_ERR_UNSUPPORTED;
}

static mfxStatus frame_get_hdl(mfxHDL pthis, mfxMemId mid, mfxHDL *hdl)
{
    *hdl = mid;
    return MFX_ERR_NONE;
}

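/* AVBufferRef free callback for the buffers created in get_buffer(): mark
 * the corresponding surface as unused again and free the surface struct. */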
static void free_buffer(void *opaque, uint8_t *data)
{
    int *used = opaque;
    *used = 0;
    av_freep(&data);
}

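/* Custom get_buffer2() callback: pick a free surface from the pool allocated
 * in frame_alloc(), wrap it in an AVBufferRef and export the mfxFrameSurface1
 * through frame->data[3], as expected for AV_PIX_FMT_QSV frames. */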
static int get_buffer(AVCodecContext *avctx, AVFrame *frame, int flags)
{
    DecodeContext *decode = avctx->opaque;

    mfxFrameSurface1 *surf;
    AVBufferRef *surf_buf;
    int idx;

    for (idx = 0; idx < decode->nb_surfaces; idx++) {
        if (!decode->surface_used[idx])
            break;
    }
    if (idx == decode->nb_surfaces) {
        fprintf(stderr, "No free surfaces\n");
        return AVERROR(ENOMEM);
    }

    surf = av_mallocz(sizeof(*surf));
    if (!surf)
        return AVERROR(ENOMEM);
    surf_buf = av_buffer_create((uint8_t*)surf, sizeof(*surf), free_buffer,
                                &decode->surface_used[idx], AV_BUFFER_FLAG_READONLY);
    if (!surf_buf) {
        av_freep(&surf);
        return AVERROR(ENOMEM);
    }

    surf->Info       = decode->frame_info;
    surf->Data.MemId = &decode->surfaces[idx];

    frame->buf[0]  = surf_buf;
    frame->data[3] = (uint8_t*)surf;

    decode->surface_used[idx] = 1;

    return 0;
}

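/* get_format() callback: select AV_PIX_FMT_QSV and, on the first call, attach
 * an AVQSVContext carrying our MFX session and the video-memory IO pattern. */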
static int get_format(AVCodecContext *avctx, const enum AVPixelFormat *pix_fmts)
{
    while (*pix_fmts != AV_PIX_FMT_NONE) {
        if (*pix_fmts == AV_PIX_FMT_QSV) {
            if (!avctx->hwaccel_context) {
                DecodeContext *decode = avctx->opaque;
                AVQSVContext *qsv = av_qsv_alloc_context();
                if (!qsv)
                    return AV_PIX_FMT_NONE;

                qsv->session   = decode->mfx_session;
                qsv->iopattern = MFX_IOPATTERN_OUT_VIDEO_MEMORY;

                avctx->hwaccel_context = qsv;
            }

            return AV_PIX_FMT_QSV;
        }

        pix_fmts++;
    }

    fprintf(stderr, "The QSV pixel format was not offered in get_format()\n");

    return AV_PIX_FMT_NONE;
}

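/* Decode one packet (or flush the decoder when pkt->data is NULL) and, for
 * every frame produced, read the NV12 surface back with vaGetImage() and
 * write the raw planes to the output context. */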
static int decode_packet(DecodeContext *decode, AVCodecContext *decoder_ctx,
                         AVFrame *frame, AVPacket *pkt,
                         AVIOContext *output_ctx)
{
    int ret = 0;
    int got_frame = 1;

    while (pkt->size > 0 || (!pkt->data && got_frame)) {
        ret = avcodec_decode_video2(decoder_ctx, frame, &got_frame, pkt);
        if (ret < 0) {
            fprintf(stderr, "Error during decoding\n");
            return ret;
        }

        pkt->data += ret;
        pkt->size -= ret;

        /* A real program would do something useful with the decoded frame here.
         * We just retrieve the raw data and write it to a file, which is rather
         * useless but pedagogic. */
        if (got_frame) {
            mfxFrameSurface1 *surf = (mfxFrameSurface1*)frame->data[3];
            VASurfaceID    surface = *(VASurfaceID*)surf->Data.MemId;

            VAImageFormat img_fmt = {
                .fourcc         = VA_FOURCC_NV12,
                .byte_order     = VA_LSB_FIRST,
                .bits_per_pixel = 8,
                .depth          = 8,
            };

            VAImage img;

            VAStatus err;
            uint8_t *data;
            int i, j;

            img.buf      = VA_INVALID_ID;
            img.image_id = VA_INVALID_ID;

            err = vaCreateImage(decode->va_dpy, &img_fmt,
                                frame->width, frame->height, &img);
            if (err != VA_STATUS_SUCCESS) {
                fprintf(stderr, "Error creating an image: %s\n",
                        vaErrorStr(err));
                ret = AVERROR_UNKNOWN;
                goto fail;
            }

            err = vaGetImage(decode->va_dpy, surface, 0, 0,
                             frame->width, frame->height,
                             img.image_id);
            if (err != VA_STATUS_SUCCESS) {
                fprintf(stderr, "Error getting an image: %s\n",
                        vaErrorStr(err));
                ret = AVERROR_UNKNOWN;
                goto fail;
            }

            err = vaMapBuffer(decode->va_dpy, img.buf, (void**)&data);
            if (err != VA_STATUS_SUCCESS) {
                fprintf(stderr, "Error mapping the image buffer: %s\n",
                        vaErrorStr(err));
                ret = AVERROR_UNKNOWN;
                goto fail;
            }

            for (i = 0; i < img.num_planes; i++)
                for (j = 0; j < (img.height >> (i > 0)); j++)
                    avio_write(output_ctx, data + img.offsets[i] + j * img.pitches[i], img.width);

fail:
            if (img.buf != VA_INVALID_ID)
                vaUnmapBuffer(decode->va_dpy, img.buf);
            if (img.image_id != VA_INVALID_ID)
                vaDestroyImage(decode->va_dpy, img.image_id);
            av_frame_unref(frame);

            if (ret < 0)
                return ret;
        }
    }

    return 0;
}

int main(int argc, char **argv)
{
    AVFormatContext *input_ctx = NULL;
    AVStream *video_st = NULL;
    AVCodecContext *decoder_ctx = NULL;
    const AVCodec *decoder;

    AVPacket pkt = { 0 };
    AVFrame *frame = NULL;

    DecodeContext decode = { NULL };

    Display *dpy = NULL;
    int va_ver_major, va_ver_minor;

    mfxIMPL mfx_impl = MFX_IMPL_AUTO_ANY;
    mfxVersion mfx_ver = { { 1, 1 } };

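    /* The frame allocator callbacks defined above, registered with libmfx
     * below so that decoder surfaces are allocated as VA surfaces. */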
    mfxFrameAllocator frame_allocator = {
        .pthis  = &decode,
        .Alloc  = frame_alloc,
        .Lock   = frame_lock,
        .Unlock = frame_unlock,
        .GetHDL = frame_get_hdl,
        .Free   = frame_free,
    };

    AVIOContext *output_ctx = NULL;

    int ret, i, err;

    av_register_all();

    if (argc < 3) {
        fprintf(stderr, "Usage: %s <input file> <output file>\n", argv[0]);
        return 1;
    }

    /* open the input file */
    ret = avformat_open_input(&input_ctx, argv[1], NULL, NULL);
    if (ret < 0) {
        fprintf(stderr, "Cannot open input file '%s': ", argv[1]);
        goto finish;
    }

    /* find the first H.264 video stream */
    for (i = 0; i < input_ctx->nb_streams; i++) {
        AVStream *st = input_ctx->streams[i];

        if (st->codec->codec_id == AV_CODEC_ID_H264 && !video_st)
            video_st = st;
        else
            st->discard = AVDISCARD_ALL;
    }
    if (!video_st) {
        fprintf(stderr, "No H.264 video stream in the input file\n");
        goto finish;
    }

    /* initialize VA-API */
    dpy = XOpenDisplay(NULL);
    if (!dpy) {
        fprintf(stderr, "Cannot open the X display\n");
        goto finish;
    }
    decode.va_dpy = vaGetDisplay(dpy);
    if (!decode.va_dpy) {
        fprintf(stderr, "Cannot open the VA display\n");
        goto finish;
    }

    err = vaInitialize(decode.va_dpy, &va_ver_major, &va_ver_minor);
    if (err != VA_STATUS_SUCCESS) {
        fprintf(stderr, "Cannot initialize VA: %s\n", vaErrorStr(err));
        goto finish;
    }
    fprintf(stderr, "Initialized VA v%d.%d\n", va_ver_major, va_ver_minor);

    /* initialize an MFX session */
    err = MFXInit(mfx_impl, &mfx_ver, &decode.mfx_session);
    if (err != MFX_ERR_NONE) {
        fprintf(stderr, "Error initializing an MFX session\n");
        goto finish;
    }

    MFXVideoCORE_SetHandle(decode.mfx_session, MFX_HANDLE_VA_DISPLAY, decode.va_dpy);
    MFXVideoCORE_SetFrameAllocator(decode.mfx_session, &frame_allocator);

    /* initialize the decoder */
    decoder = avcodec_find_decoder_by_name("h264_qsv");
    if (!decoder) {
        fprintf(stderr, "The QSV decoder is not present in libavcodec\n");
        goto finish;
    }

    decoder_ctx = avcodec_alloc_context3(decoder);
    if (!decoder_ctx) {
        ret = AVERROR(ENOMEM);
        goto finish;
    }
    decoder_ctx->codec_id = AV_CODEC_ID_H264;
    if (video_st->codec->extradata_size) {
        decoder_ctx->extradata = av_mallocz(video_st->codec->extradata_size +
                                            AV_INPUT_BUFFER_PADDING_SIZE);
        if (!decoder_ctx->extradata) {
            ret = AVERROR(ENOMEM);
            goto finish;
        }
        memcpy(decoder_ctx->extradata, video_st->codec->extradata,
               video_st->codec->extradata_size);
        decoder_ctx->extradata_size = video_st->codec->extradata_size;
    }
    decoder_ctx->refcounted_frames = 1;

    decoder_ctx->opaque      = &decode;
    decoder_ctx->get_buffer2 = get_buffer;
    decoder_ctx->get_format  = get_format;

    ret = avcodec_open2(decoder_ctx, NULL, NULL);
    if (ret < 0) {
        fprintf(stderr, "Error opening the decoder: ");
        goto finish;
    }

    /* open the output stream */
    ret = avio_open(&output_ctx, argv[2], AVIO_FLAG_WRITE);
    if (ret < 0) {
        fprintf(stderr, "Error opening the output context: ");
        goto finish;
    }

    frame = av_frame_alloc();
    if (!frame) {
        ret = AVERROR(ENOMEM);
        goto finish;
    }

    /* actual decoding */
    while (ret >= 0) {
        ret = av_read_frame(input_ctx, &pkt);
        if (ret < 0)
            break;

        if (pkt.stream_index == video_st->index)
            ret = decode_packet(&decode, decoder_ctx, frame, &pkt, output_ctx);

        av_packet_unref(&pkt);
    }

    /* flush the decoder */
    pkt.data = NULL;
    pkt.size = 0;
    ret = decode_packet(&decode, decoder_ctx, frame, &pkt, output_ctx);

finish:
    if (ret < 0) {
        char buf[1024];
        av_strerror(ret, buf, sizeof(buf));
        fprintf(stderr, "%s\n", buf);
    }

    avformat_close_input(&input_ctx);

    av_frame_free(&frame);

    if (decode.mfx_session)
        MFXClose(decode.mfx_session);
    if (decode.va_dpy)
        vaTerminate(decode.va_dpy);
    if (dpy)
        XCloseDisplay(dpy);

    if (decoder_ctx)
        av_freep(&decoder_ctx->hwaccel_context);
    avcodec_free_context(&decoder_ctx);

    avio_close(output_ctx);

    return ret;
}