/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <va/va.h>
#include <va/va_enc_h264.h>

#include "libavutil/avassert.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixfmt.h"

#include "avcodec.h"
#include "h264.h"
#include "internal.h"
#include "vaapi_encode.h"
#include "vaapi_encode_h26x.h"

enum {
    SLICE_TYPE_P  = 0,
    SLICE_TYPE_B  = 1,
    SLICE_TYPE_I  = 2,
    SLICE_TYPE_SP = 3,
    SLICE_TYPE_SI = 4,
};

// This structure contains all possibly-useful per-sequence syntax elements
// which are not already contained in the various VAAPI structures.
typedef struct VAAPIEncodeH264MiscSequenceParams {
    unsigned int profile_idc;
    char constraint_set0_flag;
    char constraint_set1_flag;
    char constraint_set2_flag;
    char constraint_set3_flag;
    char constraint_set4_flag;
    char constraint_set5_flag;

    char separate_colour_plane_flag;
    char qpprime_y_zero_transform_bypass_flag;

    char gaps_in_frame_num_allowed_flag;
    char delta_pic_order_always_zero_flag;
    char bottom_field_pic_order_in_frame_present_flag;

    unsigned int num_slice_groups_minus1;
    unsigned int slice_group_map_type;

    int pic_init_qs_minus26;

    char vui_parameters_present_flag;
} VAAPIEncodeH264MiscSequenceParams;

// This structure contains all possibly-useful per-slice syntax elements
// which are not already contained in the various VAAPI structures.
typedef struct VAAPIEncodeH264MiscSliceParams {
    unsigned int nal_unit_type;
    unsigned int nal_ref_idc;

    unsigned int colour_plane_id;
    char field_pic_flag;
    char bottom_field_flag;

    unsigned int redundant_pic_cnt;

    char sp_for_switch_flag;
    int slice_qs_delta;

    char ref_pic_list_modification_flag_l0;
    char ref_pic_list_modification_flag_l1;

    char no_output_of_prior_pics_flag;
    char long_term_reference_flag;
    char adaptive_ref_pic_marking_mode_flag;
} VAAPIEncodeH264MiscSliceParams;

typedef struct VAAPIEncodeH264Slice {
    VAAPIEncodeH264MiscSliceParams misc_slice_params;
} VAAPIEncodeH264Slice;

typedef struct VAAPIEncodeH264Context {
    VAAPIEncodeH264MiscSequenceParams misc_sequence_params;

    int mb_width;
    int mb_height;

    int fixed_qp_idr;
    int fixed_qp_p;
    int fixed_qp_b;

    int64_t idr_pic_count;
    int64_t last_idr_frame;

    // Rate control configuration.
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterRateControl rc;
    } rc_params;
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterHRD hrd;
    } hrd_params;
} VAAPIEncodeH264Context;

typedef struct VAAPIEncodeH264Options {
    int qp;
} VAAPIEncodeH264Options;

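// Each of these macros pairs a value with the name of the H.264 syntax
// element it fills, which is the form the u()/ue()/se() bitstream helpers
// from vaapi_encode_h26x.h expect.  For example, u(8, vseq_var(level_idc))
// below expands to u(8, vseq->level_idc, level_idc).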
#define vseq_var(name)     vseq->name, name
#define vseq_field(name)   vseq->seq_fields.bits.name, name
#define vpic_var(name)     vpic->name, name
#define vpic_field(name)   vpic->pic_fields.bits.name, name
#define vslice_var(name)   vslice->name, name
#define vslice_field(name) vslice->slice_fields.bits.name, name
#define mseq_var(name)     mseq->name, name
#define mslice_var(name)   mslice->name, name

static void vaapi_encode_h264_write_nal_header(PutBitContext *pbc,
                                               int nal_unit_type, int nal_ref_idc)
{
    u(1, 0, forbidden_zero_bit);
    u(2, nal_ref_idc, nal_ref_idc);
    u(5, nal_unit_type, nal_unit_type);
}

static void vaapi_encode_h264_write_trailing_rbsp(PutBitContext *pbc)
{
    u(1, 1, rbsp_stop_one_bit);
    while (put_bits_count(pbc) & 7)
        u(1, 0, rbsp_alignment_zero_bit);
}

static void vaapi_encode_h264_write_sps(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    int i;

    vaapi_encode_h264_write_nal_header(pbc, NAL_SPS, 3);

    u(8, mseq_var(profile_idc));
    u(1, mseq_var(constraint_set0_flag));
    u(1, mseq_var(constraint_set1_flag));
    u(1, mseq_var(constraint_set2_flag));
    u(1, mseq_var(constraint_set3_flag));
    u(1, mseq_var(constraint_set4_flag));
    u(1, mseq_var(constraint_set5_flag));
    u(2, 0, reserved_zero_2bits);

    u(8, vseq_var(level_idc));

    ue(vseq_var(seq_parameter_set_id));

    if (mseq->profile_idc == 100 || mseq->profile_idc == 110 ||
        mseq->profile_idc == 122 || mseq->profile_idc == 244 ||
        mseq->profile_idc == 44  || mseq->profile_idc == 83  ||
        mseq->profile_idc == 86  || mseq->profile_idc == 118 ||
        mseq->profile_idc == 128 || mseq->profile_idc == 138) {
        ue(vseq_field(chroma_format_idc));

        if (vseq->seq_fields.bits.chroma_format_idc == 3)
            u(1, mseq_var(separate_colour_plane_flag));

        ue(vseq_var(bit_depth_luma_minus8));
        ue(vseq_var(bit_depth_chroma_minus8));

        u(1, mseq_var(qpprime_y_zero_transform_bypass_flag));

        u(1, vseq_field(seq_scaling_matrix_present_flag));
        if (vseq->seq_fields.bits.seq_scaling_matrix_present_flag) {
            av_assert0(0 && "scaling matrices not supported");
        }
    }

    ue(vseq_field(log2_max_frame_num_minus4));
    ue(vseq_field(pic_order_cnt_type));

    if (vseq->seq_fields.bits.pic_order_cnt_type == 0) {
        ue(vseq_field(log2_max_pic_order_cnt_lsb_minus4));
    } else if (vseq->seq_fields.bits.pic_order_cnt_type == 1) {
        u(1, mseq_var(delta_pic_order_always_zero_flag));
        se(vseq_var(offset_for_non_ref_pic));
        se(vseq_var(offset_for_top_to_bottom_field));
        ue(vseq_var(num_ref_frames_in_pic_order_cnt_cycle));

        for (i = 0; i < vseq->num_ref_frames_in_pic_order_cnt_cycle; i++)
            se(vseq_var(offset_for_ref_frame[i]));
    }

    ue(vseq_var(max_num_ref_frames));
    u(1, mseq_var(gaps_in_frame_num_allowed_flag));

    ue(vseq->picture_width_in_mbs  - 1, pic_width_in_mbs_minus1);
    ue(vseq->picture_height_in_mbs - 1, pic_height_in_mbs_minus1);

    u(1, vseq_field(frame_mbs_only_flag));
    if (!vseq->seq_fields.bits.frame_mbs_only_flag)
        u(1, vseq_field(mb_adaptive_frame_field_flag));

    u(1, vseq_field(direct_8x8_inference_flag));

    u(1, vseq_var(frame_cropping_flag));
    if (vseq->frame_cropping_flag) {
        ue(vseq_var(frame_crop_left_offset));
        ue(vseq_var(frame_crop_right_offset));
        ue(vseq_var(frame_crop_top_offset));
        ue(vseq_var(frame_crop_bottom_offset));
    }

    u(1, mseq_var(vui_parameters_present_flag));

    vaapi_encode_h264_write_trailing_rbsp(pbc);
}

static void vaapi_encode_h264_write_pps(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncPictureParameterBufferH264 *vpic = ctx->codec_picture_params;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;

    vaapi_encode_h264_write_nal_header(pbc, NAL_PPS, 3);

    ue(vpic_var(pic_parameter_set_id));
    ue(vpic_var(seq_parameter_set_id));

    u(1, vpic_field(entropy_coding_mode_flag));
    u(1, mseq_var(bottom_field_pic_order_in_frame_present_flag));

    ue(mseq_var(num_slice_groups_minus1));
    if (mseq->num_slice_groups_minus1 > 0) {
        ue(mseq_var(slice_group_map_type));
        av_assert0(0 && "slice groups not supported");
    }

    ue(vpic_var(num_ref_idx_l0_active_minus1));
    ue(vpic_var(num_ref_idx_l1_active_minus1));

    u(1, vpic_field(weighted_pred_flag));
    u(2, vpic_field(weighted_bipred_idc));

    se(vpic->pic_init_qp - 26, pic_init_qp_minus26);
    se(mseq_var(pic_init_qs_minus26));
    se(vpic_var(chroma_qp_index_offset));

    u(1, vpic_field(deblocking_filter_control_present_flag));
    u(1, vpic_field(constrained_intra_pred_flag));
    u(1, vpic_field(redundant_pic_cnt_present_flag));
    u(1, vpic_field(transform_8x8_mode_flag));

    u(1, vpic_field(pic_scaling_matrix_present_flag));
    if (vpic->pic_fields.bits.pic_scaling_matrix_present_flag) {
        av_assert0(0 && "scaling matrices not supported");
    }

    se(vpic_var(second_chroma_qp_index_offset));

    vaapi_encode_h264_write_trailing_rbsp(pbc);
}

static void vaapi_encode_h264_write_slice_header2(PutBitContext *pbc,
                                                  VAAPIEncodeContext *ctx,
                                                  VAAPIEncodePicture *pic,
                                                  VAAPIEncodeSlice *slice)
{
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264 *vslice = slice->codec_slice_params;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    VAAPIEncodeH264Slice *pslice = slice->priv_data;
    VAAPIEncodeH264MiscSliceParams *mslice = &pslice->misc_slice_params;

    vaapi_encode_h264_write_nal_header(pbc, mslice->nal_unit_type,
                                       mslice->nal_ref_idc);

    ue(vslice->macroblock_address, first_mb_in_slice);
    ue(vslice_var(slice_type));
    ue(vpic_var(pic_parameter_set_id));

    if (mseq->separate_colour_plane_flag) {
        u(2, mslice_var(colour_plane_id));
    }

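    // frame_num is written modulo MaxFrameNum,
    // i.e. 1 << (log2_max_frame_num_minus4 + 4).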
    u(4 + vseq->seq_fields.bits.log2_max_frame_num_minus4,
      (vpic->frame_num &
       ((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1)),
      frame_num);

    if (!vseq->seq_fields.bits.frame_mbs_only_flag) {
        u(1, mslice_var(field_pic_flag));
        if (mslice->field_pic_flag)
            u(1, mslice_var(bottom_field_flag));
    }

    if (vpic->pic_fields.bits.idr_pic_flag) {
        ue(vslice_var(idr_pic_id));
    }

    if (vseq->seq_fields.bits.pic_order_cnt_type == 0) {
        u(4 + vseq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4,
          vslice_var(pic_order_cnt_lsb));
        if (mseq->bottom_field_pic_order_in_frame_present_flag &&
            !mslice->field_pic_flag) {
            se(vslice_var(delta_pic_order_cnt_bottom));
        }
    }

    if (vseq->seq_fields.bits.pic_order_cnt_type == 1 &&
        !vseq->seq_fields.bits.delta_pic_order_always_zero_flag) {
        se(vslice_var(delta_pic_order_cnt[0]));
        if (mseq->bottom_field_pic_order_in_frame_present_flag &&
            !mslice->field_pic_flag) {
            se(vslice_var(delta_pic_order_cnt[1]));
        }
    }

    if (vpic->pic_fields.bits.redundant_pic_cnt_present_flag) {
        ue(mslice_var(redundant_pic_cnt));
    }

    if (vslice->slice_type == SLICE_TYPE_B) {
        u(1, vslice_var(direct_spatial_mv_pred_flag));
    }

    if (vslice->slice_type == SLICE_TYPE_P ||
        vslice->slice_type == SLICE_TYPE_SP ||
        vslice->slice_type == SLICE_TYPE_B) {
        u(1, vslice_var(num_ref_idx_active_override_flag));
        if (vslice->num_ref_idx_active_override_flag) {
            ue(vslice_var(num_ref_idx_l0_active_minus1));
            if (vslice->slice_type == SLICE_TYPE_B)
                ue(vslice_var(num_ref_idx_l1_active_minus1));
        }
    }

    if (mslice->nal_unit_type == 20 || mslice->nal_unit_type == 21) {
        av_assert0(0 && "no MVC support");
    } else {
        if (vslice->slice_type % 5 != 2 && vslice->slice_type % 5 != 4) {
            u(1, mslice_var(ref_pic_list_modification_flag_l0));
            if (mslice->ref_pic_list_modification_flag_l0) {
                av_assert0(0 && "ref pic list modification");
            }
        }
        if (vslice->slice_type % 5 == 1) {
            u(1, mslice_var(ref_pic_list_modification_flag_l1));
            if (mslice->ref_pic_list_modification_flag_l1) {
                av_assert0(0 && "ref pic list modification");
            }
        }
    }

    if ((vpic->pic_fields.bits.weighted_pred_flag &&
         (vslice->slice_type == SLICE_TYPE_P ||
          vslice->slice_type == SLICE_TYPE_SP)) ||
        (vpic->pic_fields.bits.weighted_bipred_idc == 1 &&
         vslice->slice_type == SLICE_TYPE_B)) {
        av_assert0(0 && "prediction weights not supported");
    }

    av_assert0((mslice->nal_ref_idc > 0) ==
               vpic->pic_fields.bits.reference_pic_flag);
    if (mslice->nal_ref_idc != 0) {
        if (vpic->pic_fields.bits.idr_pic_flag) {
            u(1, mslice_var(no_output_of_prior_pics_flag));
            u(1, mslice_var(long_term_reference_flag));
        } else {
            u(1, mslice_var(adaptive_ref_pic_marking_mode_flag));
            if (mslice->adaptive_ref_pic_marking_mode_flag) {
                av_assert0(0 && "MMCOs not supported");
            }
        }
    }

    if (vpic->pic_fields.bits.entropy_coding_mode_flag &&
        vslice->slice_type != SLICE_TYPE_I &&
        vslice->slice_type != SLICE_TYPE_SI) {
        ue(vslice_var(cabac_init_idc));
    }

    se(vslice_var(slice_qp_delta));
    if (vslice->slice_type == SLICE_TYPE_SP ||
        vslice->slice_type == SLICE_TYPE_SI) {
        if (vslice->slice_type == SLICE_TYPE_SP)
            u(1, mslice_var(sp_for_switch_flag));
        se(mslice_var(slice_qs_delta));
    }

    if (vpic->pic_fields.bits.deblocking_filter_control_present_flag) {
        ue(vslice_var(disable_deblocking_filter_idc));
        if (vslice->disable_deblocking_filter_idc != 1) {
            se(vslice_var(slice_alpha_c0_offset_div2));
            se(vslice_var(slice_beta_offset_div2));
        }
    }

    if (mseq->num_slice_groups_minus1 > 0 &&
        mseq->slice_group_map_type >= 3 && mseq->slice_group_map_type <= 5) {
        av_assert0(0 && "slice groups not supported");
    }

    // No alignment - this need not be a byte boundary.
}

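// The packed sequence header carries both the SPS and the PPS: each RBSP is
// converted to a byte stream and appended at the current position in the
// output buffer.  The lengths passed around here are bit counts.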
static int vaapi_encode_h264_write_sequence_header(AVCodecContext *avctx,
                                                   char *data, size_t *data_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    PutBitContext pbc;
    char tmp[256];
    int err;
    size_t nal_len, bit_len, bit_pos, next_len;

    bit_len = *data_len;
    bit_pos = 0;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_sps(&pbc, ctx);
    nal_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    next_len = bit_len - bit_pos;
    err = ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data + bit_pos / 8,
                                                       &next_len,
                                                       tmp, nal_len);
    if (err < 0)
        return err;
    bit_pos += next_len;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_pps(&pbc, ctx);
    nal_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    next_len = bit_len - bit_pos;
    err = ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data + bit_pos / 8,
                                                       &next_len,
                                                       tmp, nal_len);
    if (err < 0)
        return err;
    bit_pos += next_len;

    *data_len = bit_pos;
    return 0;
}

static int vaapi_encode_h264_write_slice_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                VAAPIEncodeSlice *slice,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    PutBitContext pbc;
    char tmp[256];
    size_t header_len;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_slice_header2(&pbc, ctx, pic, slice);
    header_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    return ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data, data_len,
                                                        tmp, header_len);
}

static int vaapi_encode_h264_init_sequence_params(AVCodecContext *avctx)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264 *vpic = ctx->codec_picture_params;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    int i;

    {
        vseq->seq_parameter_set_id = 0;

        vseq->level_idc = avctx->level;

        vseq->max_num_ref_frames = 2;

        vseq->picture_width_in_mbs  = priv->mb_width;
        vseq->picture_height_in_mbs = priv->mb_height;

        vseq->seq_fields.bits.chroma_format_idc = 1;
        vseq->seq_fields.bits.frame_mbs_only_flag = 1;
        vseq->seq_fields.bits.direct_8x8_inference_flag = 1;
        vseq->seq_fields.bits.log2_max_frame_num_minus4 = 4;
        vseq->seq_fields.bits.pic_order_cnt_type = 0;

        if (ctx->input_width  != ctx->aligned_width ||
            ctx->input_height != ctx->aligned_height) {
            vseq->frame_cropping_flag = 1;

            vseq->frame_crop_left_offset   = 0;
            vseq->frame_crop_right_offset  =
                (ctx->aligned_width - ctx->input_width) / 2;
            vseq->frame_crop_top_offset    = 0;
            vseq->frame_crop_bottom_offset =
                (ctx->aligned_height - ctx->input_height) / 2;
        } else {
            vseq->frame_cropping_flag = 0;
        }

        vseq->bits_per_second = avctx->bit_rate;
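        // VUI-style timing: a tick is one field, so the frame rate is
        // time_scale / (2 * num_units_in_tick).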
        if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
            // framerate is frames per second as num/den, so the frame
            // duration is den/num seconds.
            vseq->num_units_in_tick = avctx->framerate.den;
            vseq->time_scale        = 2 * avctx->framerate.num;
        } else {
            vseq->num_units_in_tick = avctx->time_base.num;
            vseq->time_scale        = 2 * avctx->time_base.den;
        }

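        // GOP structure from the common encode options: ip_period is the
        // spacing between reference frames and every intra frame is an IDR.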
        vseq->intra_period     = ctx->p_per_i * (ctx->b_per_p + 1);
        vseq->intra_idr_period = vseq->intra_period;
        vseq->ip_period        = ctx->b_per_p + 1;
    }

    {
        vpic->CurrPic.picture_id = VA_INVALID_ID;
        vpic->CurrPic.flags      = VA_PICTURE_H264_INVALID;

        for (i = 0; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
            vpic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
            vpic->ReferenceFrames[i].flags      = VA_PICTURE_H264_INVALID;
        }

        vpic->coded_buf = VA_INVALID_ID;

        vpic->pic_parameter_set_id = 0;
        vpic->seq_parameter_set_id = 0;

        vpic->num_ref_idx_l0_active_minus1 = 0;
        vpic->num_ref_idx_l1_active_minus1 = 0;

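        // Entropy coding and transform options follow the requested profile:
        // CABAC for anything above Baseline (66), the 8x8 transform only for
        // High (100) and up.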
        vpic->pic_fields.bits.entropy_coding_mode_flag =
            ((avctx->profile & 0xff) != 66);
        vpic->pic_fields.bits.weighted_pred_flag  = 0;
        vpic->pic_fields.bits.weighted_bipred_idc = 0;
        vpic->pic_fields.bits.transform_8x8_mode_flag =
            ((avctx->profile & 0xff) >= 100);

        vpic->pic_init_qp = priv->fixed_qp_idr;
    }

    {
        mseq->profile_idc = avctx->profile & 0xff;

        if (avctx->profile & FF_PROFILE_H264_CONSTRAINED)
            mseq->constraint_set1_flag = 1;
        if (avctx->profile & FF_PROFILE_H264_INTRA)
            mseq->constraint_set3_flag = 1;
    }

    return 0;
}

static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
                                                 VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int i;

    if (pic->type == PICTURE_TYPE_IDR) {
        av_assert0(pic->display_order == pic->encode_order);
        priv->last_idr_frame = pic->display_order;
    } else {
        av_assert0(pic->display_order > priv->last_idr_frame);
    }

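    // frame_num is the encode-order distance from the last IDR, reduced
    // modulo MaxFrameNum.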
    vpic->frame_num = (pic->encode_order - priv->last_idr_frame) &
        ((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1);

    vpic->CurrPic.picture_id          = pic->recon_surface;
    vpic->CurrPic.frame_idx           = vpic->frame_num;
    vpic->CurrPic.flags               = 0;
    vpic->CurrPic.TopFieldOrderCnt    = pic->display_order;
    vpic->CurrPic.BottomFieldOrderCnt = pic->display_order;

    for (i = 0; i < pic->nb_refs; i++) {
        VAAPIEncodePicture *ref = pic->refs[i];
        av_assert0(ref && ref->encode_order >= priv->last_idr_frame);
        vpic->ReferenceFrames[i].picture_id = ref->recon_surface;
        vpic->ReferenceFrames[i].frame_idx  =
            ref->encode_order - priv->last_idr_frame;
        vpic->ReferenceFrames[i].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        vpic->ReferenceFrames[i].TopFieldOrderCnt    = ref->display_order;
        vpic->ReferenceFrames[i].BottomFieldOrderCnt = ref->display_order;
    }
    for (; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
        vpic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
        vpic->ReferenceFrames[i].flags      = VA_PICTURE_H264_INVALID;
    }

    vpic->coded_buf = pic->output_buffer;

    vpic->pic_fields.bits.idr_pic_flag       = (pic->type == PICTURE_TYPE_IDR);
    vpic->pic_fields.bits.reference_pic_flag = (pic->type != PICTURE_TYPE_B);

    pic->nb_slices = 1;

    return 0;
}

static int vaapi_encode_h264_init_slice_params(AVCodecContext *avctx,
                                               VAAPIEncodePicture *pic,
                                               VAAPIEncodeSlice *slice)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264 *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264 *vslice = slice->codec_slice_params;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Slice *pslice;
    VAAPIEncodeH264MiscSliceParams *mslice;
    int i;

    slice->priv_data = av_mallocz(sizeof(*pslice));
    if (!slice->priv_data)
        return AVERROR(ENOMEM);
    pslice = slice->priv_data;
    mslice = &pslice->misc_slice_params;

    if (pic->type == PICTURE_TYPE_IDR)
        mslice->nal_unit_type = NAL_IDR_SLICE;
    else
        mslice->nal_unit_type = NAL_SLICE;

    switch (pic->type) {
    case PICTURE_TYPE_IDR:
        vslice->slice_type  = SLICE_TYPE_I;
        mslice->nal_ref_idc = 3;
        break;
    case PICTURE_TYPE_I:
        vslice->slice_type  = SLICE_TYPE_I;
        mslice->nal_ref_idc = 2;
        break;
    case PICTURE_TYPE_P:
        vslice->slice_type  = SLICE_TYPE_P;
        mslice->nal_ref_idc = 1;
        break;
    case PICTURE_TYPE_B:
        vslice->slice_type  = SLICE_TYPE_B;
        mslice->nal_ref_idc = 0;
        break;
    default:
        av_assert0(0 && "invalid picture type");
    }

    // Only one slice per frame.
    vslice->macroblock_address = 0;
    vslice->num_macroblocks    = priv->mb_width * priv->mb_height;

    vslice->macroblock_info = VA_INVALID_ID;

    vslice->pic_parameter_set_id = vpic->pic_parameter_set_id;
    vslice->idr_pic_id = priv->idr_pic_count++;

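    // pic_order_cnt_lsb is the display order reduced modulo
    // MaxPicOrderCntLsb, i.e. 1 << (log2_max_pic_order_cnt_lsb_minus4 + 4).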
    vslice->pic_order_cnt_lsb = pic->display_order &
        ((1 << (4 + vseq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4)) - 1);

    for (i = 0; i < FF_ARRAY_ELEMS(vslice->RefPicList0); i++) {
        vslice->RefPicList0[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList0[i].flags      = VA_PICTURE_H264_INVALID;
        vslice->RefPicList1[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList1[i].flags      = VA_PICTURE_H264_INVALID;
    }

    av_assert0(pic->nb_refs <= 2);
    if (pic->nb_refs >= 1) {
        // Backward reference for P or B frame.
        av_assert0(pic->type == PICTURE_TYPE_P ||
                   pic->type == PICTURE_TYPE_B);

        vslice->num_ref_idx_l0_active_minus1 = 0;
        vslice->RefPicList0[0] = vpic->ReferenceFrames[0];
    }
    if (pic->nb_refs >= 2) {
        // Forward reference for B frame.
        av_assert0(pic->type == PICTURE_TYPE_B);

        vslice->num_ref_idx_l1_active_minus1 = 0;
        vslice->RefPicList1[0] = vpic->ReferenceFrames[1];
    }

    if (pic->type == PICTURE_TYPE_B)
        vslice->slice_qp_delta = priv->fixed_qp_b - vpic->pic_init_qp;
    else if (pic->type == PICTURE_TYPE_P)
        vslice->slice_qp_delta = priv->fixed_qp_p - vpic->pic_init_qp;
    else
        vslice->slice_qp_delta = priv->fixed_qp_idr - vpic->pic_init_qp;

    vslice->direct_spatial_mv_pred_flag = 1;

    return 0;
}

static av_cold int vaapi_encode_h264_init_constant_bitrate(AVCodecContext *avctx)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int hrd_buffer_size;
    int hrd_initial_buffer_fullness;

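    // Default to an HRD buffer holding one second of output at the target
    // bitrate, starting three-quarters full.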
    if (avctx->rc_buffer_size)
        hrd_buffer_size = avctx->rc_buffer_size;
    else
        hrd_buffer_size = avctx->bit_rate;
    if (avctx->rc_initial_buffer_occupancy)
        hrd_initial_buffer_fullness = avctx->rc_initial_buffer_occupancy;
    else
        hrd_initial_buffer_fullness = hrd_buffer_size * 3 / 4;

    priv->rc_params.misc.type = VAEncMiscParameterTypeRateControl;
    priv->rc_params.rc = (VAEncMiscParameterRateControl) {
        .bits_per_second   = avctx->bit_rate,
        .target_percentage = 66,
        .window_size       = 1000,
        .initial_qp        = (avctx->qmax >= 0 ? avctx->qmax : 40),
        .min_qp            = (avctx->qmin >= 0 ? avctx->qmin : 18),
        .basic_unit_size   = 0,
    };
    ctx->global_params[ctx->nb_global_params] =
        &priv->rc_params.misc;
    ctx->global_params_size[ctx->nb_global_params++] =
        sizeof(priv->rc_params);

    priv->hrd_params.misc.type = VAEncMiscParameterTypeHRD;
    priv->hrd_params.hrd = (VAEncMiscParameterHRD) {
        .initial_buffer_fullness = hrd_initial_buffer_fullness,
        .buffer_size             = hrd_buffer_size,
    };
    ctx->global_params[ctx->nb_global_params] =
        &priv->hrd_params.misc;
    ctx->global_params_size[ctx->nb_global_params++] =
        sizeof(priv->hrd_params);

    // These still need to be set for pic_init_qp/slice_qp_delta.
    priv->fixed_qp_idr = 26;
    priv->fixed_qp_p   = 26;
    priv->fixed_qp_b   = 26;

    av_log(avctx, AV_LOG_DEBUG, "Using constant-bitrate = %d bps.\n",
           avctx->bit_rate);
    return 0;
}

static av_cold int vaapi_encode_h264_init_fixed_qp(AVCodecContext *avctx)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    VAAPIEncodeH264Options *opt = ctx->codec_options;

    priv->fixed_qp_p = opt->qp;
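    // IDR and B-frame QPs are derived from the P-frame QP via the generic
    // i_quant_factor/offset and b_quant_factor/offset options, rounded to
    // the nearest integer.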
    if (avctx->i_quant_factor > 0.0)
        priv->fixed_qp_idr = (int)((priv->fixed_qp_p * avctx->i_quant_factor +
                                    avctx->i_quant_offset) + 0.5);
    else
        priv->fixed_qp_idr = priv->fixed_qp_p;
    if (avctx->b_quant_factor > 0.0)
        priv->fixed_qp_b = (int)((priv->fixed_qp_p * avctx->b_quant_factor +
                                  avctx->b_quant_offset) + 0.5);
    else
        priv->fixed_qp_b = priv->fixed_qp_p;

    av_log(avctx, AV_LOG_DEBUG, "Using fixed QP = "
           "%d / %d / %d for IDR / P / B frames.\n",
           priv->fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);
    return 0;
}

static av_cold int vaapi_encode_h264_init_internal(AVCodecContext *avctx)
{
    static const VAConfigAttrib default_config_attributes[] = {
        { .type  = VAConfigAttribRTFormat,
          .value = VA_RT_FORMAT_YUV420 },
        { .type  = VAConfigAttribEncPackedHeaders,
          .value = (VA_ENC_PACKED_HEADER_SEQUENCE |
                    VA_ENC_PACKED_HEADER_SLICE) },
    };

    VAAPIEncodeContext *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int i, err;

    switch (avctx->profile) {
    case FF_PROFILE_H264_CONSTRAINED_BASELINE:
        ctx->va_profile = VAProfileH264ConstrainedBaseline;
        break;
    case FF_PROFILE_H264_BASELINE:
        ctx->va_profile = VAProfileH264Baseline;
        break;
    case FF_PROFILE_H264_MAIN:
        ctx->va_profile = VAProfileH264Main;
        break;
    case FF_PROFILE_H264_EXTENDED:
        av_log(avctx, AV_LOG_ERROR, "H.264 extended profile "
               "is not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_UNKNOWN:
    case FF_PROFILE_H264_HIGH:
        ctx->va_profile = VAProfileH264High;
        break;
    case FF_PROFILE_H264_HIGH_10:
    case FF_PROFILE_H264_HIGH_10_INTRA:
        av_log(avctx, AV_LOG_ERROR, "H.264 10-bit profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_H264_HIGH_422:
    case FF_PROFILE_H264_HIGH_422_INTRA:
    case FF_PROFILE_H264_HIGH_444:
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
    case FF_PROFILE_H264_HIGH_444_INTRA:
    case FF_PROFILE_H264_CAVLC_444:
        av_log(avctx, AV_LOG_ERROR, "H.264 non-4:2:0 profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown H.264 profile %d.\n",
               avctx->profile);
        return AVERROR(EINVAL);
    }
    ctx->va_entrypoint = VAEntrypointEncSlice;

    ctx->input_width    = avctx->width;
    ctx->input_height   = avctx->height;
    ctx->aligned_width  = FFALIGN(ctx->input_width,  16);
    ctx->aligned_height = FFALIGN(ctx->input_height, 16);
    priv->mb_width  = ctx->aligned_width  / 16;
    priv->mb_height = ctx->aligned_height / 16;

    for (i = 0; i < FF_ARRAY_ELEMS(default_config_attributes); i++) {
        ctx->config_attributes[ctx->nb_config_attributes++] =
            default_config_attributes[i];
    }

    if (avctx->bit_rate > 0) {
        ctx->va_rc_mode = VA_RC_CBR;
        err = vaapi_encode_h264_init_constant_bitrate(avctx);
    } else {
        ctx->va_rc_mode = VA_RC_CQP;
        err = vaapi_encode_h264_init_fixed_qp(avctx);
    }
    if (err < 0)
        return err;

    ctx->config_attributes[ctx->nb_config_attributes++] = (VAConfigAttrib) {
        .type  = VAConfigAttribRateControl,
        .value = ctx->va_rc_mode,
    };

    ctx->nb_recon_frames = 20;

    return 0;
}

static VAAPIEncodeType vaapi_encode_type_h264 = {
    .priv_data_size        = sizeof(VAAPIEncodeH264Context),

    .init                  = &vaapi_encode_h264_init_internal,

    .sequence_params_size  = sizeof(VAEncSequenceParameterBufferH264),
    .init_sequence_params  = &vaapi_encode_h264_init_sequence_params,

    .picture_params_size   = sizeof(VAEncPictureParameterBufferH264),
    .init_picture_params   = &vaapi_encode_h264_init_picture_params,

    .slice_params_size     = sizeof(VAEncSliceParameterBufferH264),
    .init_slice_params     = &vaapi_encode_h264_init_slice_params,

    .sequence_header_type  = VAEncPackedHeaderSequence,
    .write_sequence_header = &vaapi_encode_h264_write_sequence_header,

    .slice_header_type     = VAEncPackedHeaderH264_Slice,
    .write_slice_header    = &vaapi_encode_h264_write_slice_header,
};

static av_cold int vaapi_encode_h264_init(AVCodecContext *avctx)
{
    return ff_vaapi_encode_init(avctx, &vaapi_encode_type_h264);
}

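// The per-codec options are stored in the codec_options_data[] region which
// follows the common VAAPIEncodeContext inside priv_data, hence the combined
// offset here and the summed priv_data_size below.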
#define OFFSET(x) (offsetof(VAAPIEncodeContext, codec_options_data) + \
                   offsetof(VAAPIEncodeH264Options, x))
#define FLAGS (AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM)
static const AVOption vaapi_encode_h264_options[] = {
    { "qp", "Constant QP (for P frames; scaled by qfactor/qoffset for I/B)",
      OFFSET(qp), AV_OPT_TYPE_INT, { .i64 = 20 }, 0, 52, FLAGS },
    { NULL },
};

static const AVCodecDefault vaapi_encode_h264_defaults[] = {
    { "profile",   "100" },
    { "level",     "51"  },
    { "b",         "0"   },
    { "bf",        "2"   },
    { "g",         "120" },
    { "i_qfactor", "1.0" },
    { "i_qoffset", "0.0" },
    { "b_qfactor", "1.2" },
    { "b_qoffset", "0.0" },
    { NULL },
};

static const AVClass vaapi_encode_h264_class = {
    .class_name = "h264_vaapi",
    .item_name  = av_default_item_name,
    .option     = vaapi_encode_h264_options,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_h264_vaapi_encoder = {
    .name           = "h264_vaapi",
    .long_name      = NULL_IF_CONFIG_SMALL("H.264/AVC (VAAPI)"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_H264,
    .priv_data_size = (sizeof(VAAPIEncodeContext) +
                       sizeof(VAAPIEncodeH264Options)),
    .init           = &vaapi_encode_h264_init,
    .encode2        = &ff_vaapi_encode2,
    .close          = &ff_vaapi_encode_close,
    .priv_class     = &vaapi_encode_h264_class,
    .capabilities   = AV_CODEC_CAP_DELAY,
    .defaults       = vaapi_encode_h264_defaults,
    .pix_fmts       = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE,
    },
};