/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <windows.h>

#if !defined(_WIN32_WINNT) || _WIN32_WINNT < 0x0600
#undef _WIN32_WINNT
#define _WIN32_WINNT 0x0600
#endif
#define DXVA2API_USE_BITFIELDS
#define COBJMACROS

#include <d3d9.h>
#include <dxva2api.h>
#include <initguid.h>

#include "common.h"
#include "hwcontext.h"
#include "hwcontext_dxva2.h"
#include "hwcontext_internal.h"
#include "imgutils.h"
#include "pixdesc.h"
#include "pixfmt.h"

typedef struct DXVA2FramesContext {
    IDirect3DSurface9 **surfaces_internal;
    int                 nb_surfaces_used;

    HANDLE device_handle;
    IDirectXVideoAccelerationService *service;

    D3DFORMAT format;
} DXVA2FramesContext;

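/* Surface formats handled by this implementation: the D3D fourcc of the
 * surface and the matching software pixel format. Only NV12 is supported
 * for now. */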
static const struct {
    D3DFORMAT          d3d_format;
    enum AVPixelFormat pix_fmt;
} supported_formats[] = {
    { MKTAG('N', 'V', '1', '2'), AV_PIX_FMT_NV12 },
};

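/* Service interface IIDs passed to IDirect3DDeviceManager9_GetVideoService(),
 * depending on whether the surfaces are decoder or processor render targets. */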
DEFINE_GUID(video_decoder_service,   0xfc51a551, 0xd5e7, 0x11d9, 0xaf, 0x55, 0x00, 0x05, 0x4e, 0x43, 0xff, 0x02);
DEFINE_GUID(video_processor_service, 0xfc51a552, 0xd5e7, 0x11d9, 0xaf, 0x55, 0x00, 0x05, 0x4e, 0x43, 0xff, 0x02);

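/* Release everything owned by the frames context: the caller-provided decoder
 * (if any), the internally allocated surfaces, the video service and the
 * device handle opened on the device manager. */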
static void dxva2_frames_uninit(AVHWFramesContext *ctx)
{
    AVDXVA2DeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    AVDXVA2FramesContext *frames_hwctx = ctx->hwctx;
    DXVA2FramesContext *s = ctx->internal->priv;
    int i;

    if (frames_hwctx->decoder_to_release)
        IDirectXVideoDecoder_Release(frames_hwctx->decoder_to_release);

    if (s->surfaces_internal) {
        for (i = 0; i < frames_hwctx->nb_surfaces; i++) {
            if (s->surfaces_internal[i])
                IDirect3DSurface9_Release(s->surfaces_internal[i]);
        }
    }
    av_freep(&s->surfaces_internal);

    if (s->service) {
        IDirectXVideoAccelerationService_Release(s->service);
        s->service = NULL;
    }

    if (s->device_handle != INVALID_HANDLE_VALUE) {
        IDirect3DDeviceManager9_CloseDeviceHandle(device_hwctx->devmgr, s->device_handle);
        s->device_handle = INVALID_HANDLE_VALUE;
    }
}

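/* Pool allocation callback: hand out the preallocated surfaces one by one.
 * The returned buffers carry no free callback, since the surfaces themselves
 * are released in dxva2_frames_uninit(). */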
static AVBufferRef *dxva2_pool_alloc(void *opaque, int size)
{
    AVHWFramesContext      *ctx = (AVHWFramesContext*)opaque;
    DXVA2FramesContext       *s = ctx->internal->priv;
    AVDXVA2FramesContext *hwctx = ctx->hwctx;

    if (s->nb_surfaces_used < hwctx->nb_surfaces) {
        s->nb_surfaces_used++;
        return av_buffer_create((uint8_t*)s->surfaces_internal[s->nb_surfaces_used - 1],
                                sizeof(*hwctx->surfaces), NULL, 0, 0);
    }

    return NULL;
}

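/* Set up the internal surface pool: open a handle on the device manager,
 * obtain the video service matching the surface type, create
 * initial_pool_size surfaces in the requested format and wrap them in an
 * AVBufferPool. */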
static int dxva2_init_pool(AVHWFramesContext *ctx)
{
    AVDXVA2FramesContext *frames_hwctx = ctx->hwctx;
    AVDXVA2DeviceContext *device_hwctx = ctx->device_ctx->hwctx;
    DXVA2FramesContext              *s = ctx->internal->priv;
    int decode = (frames_hwctx->surface_type == DXVA2_VideoDecoderRenderTarget);

    int i;
    HRESULT hr;

    if (ctx->initial_pool_size <= 0)
        return 0;

    hr = IDirect3DDeviceManager9_OpenDeviceHandle(device_hwctx->devmgr, &s->device_handle);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to open device handle\n");
        return AVERROR_UNKNOWN;
    }

    hr = IDirect3DDeviceManager9_GetVideoService(device_hwctx->devmgr,
                                                 s->device_handle,
                                                 decode ? &video_decoder_service : &video_processor_service,
                                                 (void **)&s->service);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Failed to create the video service\n");
        return AVERROR_UNKNOWN;
    }

    for (i = 0; i < FF_ARRAY_ELEMS(supported_formats); i++) {
        if (ctx->sw_format == supported_formats[i].pix_fmt) {
            s->format = supported_formats[i].d3d_format;
            break;
        }
    }
    if (i == FF_ARRAY_ELEMS(supported_formats)) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported pixel format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(EINVAL);
    }

    s->surfaces_internal = av_mallocz_array(ctx->initial_pool_size,
                                            sizeof(*s->surfaces_internal));
    if (!s->surfaces_internal)
        return AVERROR(ENOMEM);

    hr = IDirectXVideoAccelerationService_CreateSurface(s->service,
                                                        ctx->width, ctx->height,
                                                        ctx->initial_pool_size - 1,
                                                        s->format, D3DPOOL_DEFAULT, 0,
                                                        frames_hwctx->surface_type,
                                                        s->surfaces_internal, NULL);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Could not create the surfaces\n");
        return AVERROR_UNKNOWN;
    }

    ctx->internal->pool_internal = av_buffer_pool_init2(sizeof(*s->surfaces_internal),
                                                        ctx, dxva2_pool_alloc, NULL);
    if (!ctx->internal->pool_internal)
        return AVERROR(ENOMEM);

    frames_hwctx->surfaces    = s->surfaces_internal;
    frames_hwctx->nb_surfaces = ctx->initial_pool_size;

    return 0;
}

static int dxva2_frames_init(AVHWFramesContext *ctx)
{
    AVDXVA2FramesContext *hwctx = ctx->hwctx;
    DXVA2FramesContext       *s = ctx->internal->priv;
    int ret;

    if (hwctx->surface_type != DXVA2_VideoDecoderRenderTarget &&
        hwctx->surface_type != DXVA2_VideoProcessorRenderTarget) {
        av_log(ctx, AV_LOG_ERROR, "Unknown surface type: %lu\n",
               hwctx->surface_type);
        return AVERROR(EINVAL);
    }

    s->device_handle = INVALID_HANDLE_VALUE;

    /* init the frame pool if the caller didn't provide one */
    if (!ctx->pool) {
        ret = dxva2_init_pool(ctx);
        if (ret < 0) {
            av_log(ctx, AV_LOG_ERROR, "Error creating an internal frame pool\n");
            return ret;
        }
    }

    return 0;
}

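/* Export a pooled surface as an AVFrame; as for all DXVA2 frames, the
 * IDirect3DSurface9 pointer is carried in data[3]. */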
static int dxva2_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_DXVA2_VLD;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int dxva2_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    enum AVPixelFormat *fmts;

    fmts = av_malloc_array(2, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    fmts[0] = ctx->sw_format;
    fmts[1] = AV_PIX_FMT_NONE;

    *formats = fmts;

    return 0;
}

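/* Copy between a locked DXVA2 surface and a system-memory NV12 frame, in
 * either direction: the luma plane first, then the interleaved chroma plane
 * located Pitch * Height bytes into the locked data. */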
static int dxva2_transfer_data(AVHWFramesContext *ctx, AVFrame *dst,
                               const AVFrame *src)
{
    IDirect3DSurface9 *surface;
    D3DSURFACE_DESC    surfaceDesc;
    D3DLOCKED_RECT     LockedRect;
    HRESULT hr;

    int download = !!src->hw_frames_ctx;

    surface = (IDirect3DSurface9*)(download ? src->data[3] : dst->data[3]);

    hr = IDirect3DSurface9_GetDesc(surface, &surfaceDesc);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Error getting a surface description\n");
        return AVERROR_UNKNOWN;
    }

    hr = IDirect3DSurface9_LockRect(surface, &LockedRect, NULL,
                                    download ? D3DLOCK_READONLY : D3DLOCK_DISCARD);
    if (FAILED(hr)) {
        av_log(ctx, AV_LOG_ERROR, "Unable to lock DXVA2 surface\n");
        return AVERROR_UNKNOWN;
    }

    if (download) {
        av_image_copy_plane(dst->data[0], dst->linesize[0],
                            (uint8_t*)LockedRect.pBits, LockedRect.Pitch,
                            src->width, src->height);
        av_image_copy_plane(dst->data[1], dst->linesize[1],
                            (uint8_t*)LockedRect.pBits + LockedRect.Pitch * surfaceDesc.Height,
                            LockedRect.Pitch, src->width, src->height / 2);
    } else {
        /* upload: read from the system-memory source frame, not the
         * hardware destination frame, whose plane pointers are unset */
        av_image_copy_plane((uint8_t*)LockedRect.pBits, LockedRect.Pitch,
                            src->data[0], src->linesize[0],
                            src->width, src->height);
        av_image_copy_plane((uint8_t*)LockedRect.pBits + LockedRect.Pitch * surfaceDesc.Height,
                            LockedRect.Pitch, src->data[1], src->linesize[1],
                            src->width, src->height / 2);
    }

    IDirect3DSurface9_UnlockRect(surface);

    return 0;
}

const HWContextType ff_hwcontext_type_dxva2 = {
    .type                 = AV_HWDEVICE_TYPE_DXVA2,
    .name                 = "DXVA2",

    .device_hwctx_size    = sizeof(AVDXVA2DeviceContext),
    .frames_hwctx_size    = sizeof(AVDXVA2FramesContext),
    .frames_priv_size     = sizeof(DXVA2FramesContext),

    .frames_init          = dxva2_frames_init,
    .frames_uninit        = dxva2_frames_uninit,
    .frames_get_buffer    = dxva2_get_buffer,
    .transfer_get_formats = dxva2_transfer_get_formats,
    .transfer_data_to     = dxva2_transfer_data,
    .transfer_data_from   = dxva2_transfer_data,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_DXVA2_VLD, AV_PIX_FMT_NONE },
};