/*
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <va/va.h>
#include <va/va_enc_h264.h>

#include "libavutil/avassert.h"
#include "libavutil/internal.h"
#include "libavutil/opt.h"
#include "libavutil/pixfmt.h"

#include "avcodec.h"
#include "h264.h"
#include "internal.h"
#include "vaapi_encode.h"
#include "vaapi_encode_h26x.h"

enum {
    SLICE_TYPE_P  = 0,
    SLICE_TYPE_B  = 1,
    SLICE_TYPE_I  = 2,
    SLICE_TYPE_SP = 3,
    SLICE_TYPE_SI = 4,
};
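// These values follow the slice_type codes defined by the H.264
// specification (Table 7-6), which is what the slice header written
// below expects.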

// This structure contains all possibly-useful per-sequence syntax elements
// which are not already contained in the various VAAPI structures.
typedef struct VAAPIEncodeH264MiscSequenceParams {
    unsigned int profile_idc;
    char constraint_set0_flag;
    char constraint_set1_flag;
    char constraint_set2_flag;
    char constraint_set3_flag;
    char constraint_set4_flag;
    char constraint_set5_flag;

    char separate_colour_plane_flag;
    char qpprime_y_zero_transform_bypass_flag;

    char gaps_in_frame_num_allowed_flag;
    char delta_pic_order_always_zero_flag;
    char bottom_field_pic_order_in_frame_present_flag;

    unsigned int num_slice_groups_minus1;
    unsigned int slice_group_map_type;

    int pic_init_qs_minus26;

    char vui_parameters_present_flag;
} VAAPIEncodeH264MiscSequenceParams;

// This structure contains all possibly-useful per-slice syntax elements
// which are not already contained in the various VAAPI structures.
typedef struct VAAPIEncodeH264MiscSliceParams {
    unsigned int nal_unit_type;
    unsigned int nal_ref_idc;

    unsigned int colour_plane_id;
    char field_pic_flag;
    char bottom_field_flag;

    unsigned int redundant_pic_cnt;

    char sp_for_switch_flag;
    int  slice_qs_delta;

    char ref_pic_list_modification_flag_l0;
    char ref_pic_list_modification_flag_l1;

    char no_output_of_prior_pics_flag;
    char long_term_reference_flag;
    char adaptive_ref_pic_marking_mode_flag;
} VAAPIEncodeH264MiscSliceParams;

typedef struct VAAPIEncodeH264Slice {
    VAAPIEncodeH264MiscSliceParams misc_slice_params;
} VAAPIEncodeH264Slice;

typedef struct VAAPIEncodeH264Context {
    VAAPIEncodeH264MiscSequenceParams misc_sequence_params;

    int mb_width;
    int mb_height;

    int fixed_qp_idr;
    int fixed_qp_p;
    int fixed_qp_b;

    int64_t idr_pic_count;
    int64_t last_idr_frame;

    // Rate control configuration.
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterRateControl rc;
    } rc_params;
    struct {
        VAEncMiscParameterBuffer misc;
        VAEncMiscParameterHRD hrd;
    } hrd_params;
} VAAPIEncodeH264Context;


#define vseq_var(name)     vseq->name, name
#define vseq_field(name)   vseq->seq_fields.bits.name, name
#define vpic_var(name)     vpic->name, name
#define vpic_field(name)   vpic->pic_fields.bits.name, name
#define vslice_var(name)   vslice->name, name
#define vslice_field(name) vslice->slice_fields.bits.name, name
#define mseq_var(name)     mseq->name, name
#define mslice_var(name)   mslice->name, name
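// Each of these macros expands to "value, syntax_element_name", matching the
// argument pattern of the u()/ue()/se() bitstream-writing macros, which are
// presumably provided by vaapi_encode_h26x.h; the name argument exists for
// tracing/debugging rather than affecting the emitted bits.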

static void vaapi_encode_h264_write_nal_header(PutBitContext *pbc,
                                               int nal_unit_type, int nal_ref_idc)
{
    u(1, 0, forbidden_zero_bit);
    u(2, nal_ref_idc, nal_ref_idc);
    u(5, nal_unit_type, nal_unit_type);
}

static void vaapi_encode_h264_write_trailing_rbsp(PutBitContext *pbc)
{
    u(1, 1, rbsp_stop_one_bit);
    while (put_bits_count(pbc) & 7)
        u(1, 0, rbsp_alignment_zero_bit);
}

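// Write a sequence parameter set (H.264 section 7.3.2.1), taking most values
// from the VAAPI sequence parameter buffer and the remainder from the
// misc-sequence-parameter structure above.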
static void vaapi_encode_h264_write_sps(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    int i;

    vaapi_encode_h264_write_nal_header(pbc, NAL_SPS, 3);

    u(8, mseq_var(profile_idc));
    u(1, mseq_var(constraint_set0_flag));
    u(1, mseq_var(constraint_set1_flag));
    u(1, mseq_var(constraint_set2_flag));
    u(1, mseq_var(constraint_set3_flag));
    u(1, mseq_var(constraint_set4_flag));
    u(1, mseq_var(constraint_set5_flag));
    u(2, 0, reserved_zero_2bits);

    u(8, vseq_var(level_idc));

    ue(vseq_var(seq_parameter_set_id));

    if (mseq->profile_idc == 100 || mseq->profile_idc == 110 ||
        mseq->profile_idc == 122 || mseq->profile_idc == 244 ||
        mseq->profile_idc ==  44 || mseq->profile_idc ==  83 ||
        mseq->profile_idc ==  86 || mseq->profile_idc == 118 ||
        mseq->profile_idc == 128 || mseq->profile_idc == 138) {
        ue(vseq_field(chroma_format_idc));

        if (vseq->seq_fields.bits.chroma_format_idc == 3)
            u(1, mseq_var(separate_colour_plane_flag));

        ue(vseq_var(bit_depth_luma_minus8));
        ue(vseq_var(bit_depth_chroma_minus8));

        u(1, mseq_var(qpprime_y_zero_transform_bypass_flag));

        u(1, vseq_field(seq_scaling_matrix_present_flag));
        if (vseq->seq_fields.bits.seq_scaling_matrix_present_flag) {
            av_assert0(0 && "scaling matrices not supported");
        }
    }

    ue(vseq_field(log2_max_frame_num_minus4));
    ue(vseq_field(pic_order_cnt_type));

    if (vseq->seq_fields.bits.pic_order_cnt_type == 0) {
        ue(vseq_field(log2_max_pic_order_cnt_lsb_minus4));
    } else if (vseq->seq_fields.bits.pic_order_cnt_type == 1) {
        u(1, mseq_var(delta_pic_order_always_zero_flag));
        se(vseq_var(offset_for_non_ref_pic));
        se(vseq_var(offset_for_top_to_bottom_field));
        ue(vseq_var(num_ref_frames_in_pic_order_cnt_cycle));

        for (i = 0; i < vseq->num_ref_frames_in_pic_order_cnt_cycle; i++)
            se(vseq_var(offset_for_ref_frame[i]));
    }

    ue(vseq_var(max_num_ref_frames));
    u(1, mseq_var(gaps_in_frame_num_allowed_flag));

    ue(vseq->picture_width_in_mbs  - 1, pic_width_in_mbs_minus1);
    ue(vseq->picture_height_in_mbs - 1, pic_height_in_mbs_minus1);

    u(1, vseq_field(frame_mbs_only_flag));
    if (!vseq->seq_fields.bits.frame_mbs_only_flag)
        u(1, vseq_field(mb_adaptive_frame_field_flag));

    u(1, vseq_field(direct_8x8_inference_flag));

    u(1, vseq_var(frame_cropping_flag));
    if (vseq->frame_cropping_flag) {
        ue(vseq_var(frame_crop_left_offset));
        ue(vseq_var(frame_crop_right_offset));
        ue(vseq_var(frame_crop_top_offset));
        ue(vseq_var(frame_crop_bottom_offset));
    }

    u(1, mseq_var(vui_parameters_present_flag));

    vaapi_encode_h264_write_trailing_rbsp(pbc);
}

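// Write a picture parameter set (H.264 section 7.3.2.2) from the VAAPI
// picture parameter buffer plus the misc sequence parameters.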
static void vaapi_encode_h264_write_pps(PutBitContext *pbc,
                                        VAAPIEncodeContext *ctx)
{
    VAEncPictureParameterBufferH264   *vpic = ctx->codec_picture_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;

    vaapi_encode_h264_write_nal_header(pbc, NAL_PPS, 3);

    ue(vpic_var(pic_parameter_set_id));
    ue(vpic_var(seq_parameter_set_id));

    u(1, vpic_field(entropy_coding_mode_flag));
    u(1, mseq_var(bottom_field_pic_order_in_frame_present_flag));

    ue(mseq_var(num_slice_groups_minus1));
    if (mseq->num_slice_groups_minus1 > 0) {
        ue(mseq_var(slice_group_map_type));
        av_assert0(0 && "slice groups not supported");
    }

    ue(vpic_var(num_ref_idx_l0_active_minus1));
    ue(vpic_var(num_ref_idx_l1_active_minus1));

    u(1, vpic_field(weighted_pred_flag));
    u(2, vpic_field(weighted_bipred_idc));

    se(vpic->pic_init_qp - 26, pic_init_qp_minus26);
    se(mseq_var(pic_init_qs_minus26));
    se(vpic_var(chroma_qp_index_offset));

    u(1, vpic_field(deblocking_filter_control_present_flag));
    u(1, vpic_field(constrained_intra_pred_flag));
    u(1, vpic_field(redundant_pic_cnt_present_flag));
    u(1, vpic_field(transform_8x8_mode_flag));

    u(1, vpic_field(pic_scaling_matrix_present_flag));
    if (vpic->pic_fields.bits.pic_scaling_matrix_present_flag) {
        av_assert0(0 && "scaling matrices not supported");
    }

    se(vpic_var(second_chroma_qp_index_offset));

    vaapi_encode_h264_write_trailing_rbsp(pbc);
}

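// Write a slice header (H.264 section 7.3.3).  Values come from the VAAPI
// sequence/picture/slice parameter buffers, with the remaining syntax
// elements taken from the misc slice parameters attached to the slice.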
static void vaapi_encode_h264_write_slice_header2(PutBitContext *pbc,
                                                  VAAPIEncodeContext *ctx,
                                                  VAAPIEncodePicture *pic,
                                                  VAAPIEncodeSlice *slice)
{
    VAEncSequenceParameterBufferH264  *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264   *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264   *vslice = slice->codec_slice_params;
    VAAPIEncodeH264Context            *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    VAAPIEncodeH264Slice            *pslice = slice->priv_data;
    VAAPIEncodeH264MiscSliceParams  *mslice = &pslice->misc_slice_params;

    vaapi_encode_h264_write_nal_header(pbc, mslice->nal_unit_type,
                                       mslice->nal_ref_idc);

    ue(vslice->macroblock_address, first_mb_in_slice);
    ue(vslice_var(slice_type));
    ue(vpic_var(pic_parameter_set_id));

    if (mseq->separate_colour_plane_flag) {
        u(2, mslice_var(colour_plane_id));
    }

    u(4 + vseq->seq_fields.bits.log2_max_frame_num_minus4,
      (vpic->frame_num &
       ((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1)),
      frame_num);

    if (!vseq->seq_fields.bits.frame_mbs_only_flag) {
        u(1, mslice_var(field_pic_flag));
        if (mslice->field_pic_flag)
            u(1, mslice_var(bottom_field_flag));
    }

    if (vpic->pic_fields.bits.idr_pic_flag) {
        ue(vslice_var(idr_pic_id));
    }

    if (vseq->seq_fields.bits.pic_order_cnt_type == 0) {
        u(4 + vseq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4,
          vslice_var(pic_order_cnt_lsb));
        if (mseq->bottom_field_pic_order_in_frame_present_flag &&
            !mslice->field_pic_flag) {
            se(vslice_var(delta_pic_order_cnt_bottom));
        }
    }

    if (vseq->seq_fields.bits.pic_order_cnt_type == 1 &&
        !vseq->seq_fields.bits.delta_pic_order_always_zero_flag) {
        se(vslice_var(delta_pic_order_cnt[0]));
        if (mseq->bottom_field_pic_order_in_frame_present_flag &&
            !mslice->field_pic_flag) {
            se(vslice_var(delta_pic_order_cnt[1]));
        }
    }

    if (vpic->pic_fields.bits.redundant_pic_cnt_present_flag) {
        ue(mslice_var(redundant_pic_cnt));
    }

    if (vslice->slice_type == SLICE_TYPE_B) {
        u(1, vslice_var(direct_spatial_mv_pred_flag));
    }

    if (vslice->slice_type == SLICE_TYPE_P ||
        vslice->slice_type == SLICE_TYPE_SP ||
        vslice->slice_type == SLICE_TYPE_B) {
        u(1, vslice_var(num_ref_idx_active_override_flag));
        if (vslice->num_ref_idx_active_override_flag) {
            ue(vslice_var(num_ref_idx_l0_active_minus1));
            if (vslice->slice_type == SLICE_TYPE_B)
                ue(vslice_var(num_ref_idx_l1_active_minus1));
        }
    }

    if (mslice->nal_unit_type == 20 || mslice->nal_unit_type == 21) {
        av_assert0(0 && "no MVC support");
    } else {
        if (vslice->slice_type % 5 != 2 && vslice->slice_type % 5 != 4) {
            u(1, mslice_var(ref_pic_list_modification_flag_l0));
            if (mslice->ref_pic_list_modification_flag_l0) {
                av_assert0(0 && "ref pic list modification");
            }
        }
        if (vslice->slice_type % 5 == 1) {
            u(1, mslice_var(ref_pic_list_modification_flag_l1));
            if (mslice->ref_pic_list_modification_flag_l1) {
                av_assert0(0 && "ref pic list modification");
            }
        }
    }

    if ((vpic->pic_fields.bits.weighted_pred_flag &&
         (vslice->slice_type == SLICE_TYPE_P ||
          vslice->slice_type == SLICE_TYPE_SP)) ||
        (vpic->pic_fields.bits.weighted_bipred_idc == 1 &&
         vslice->slice_type == SLICE_TYPE_B)) {
        av_assert0(0 && "prediction weights not supported");
    }

    av_assert0(mslice->nal_ref_idc > 0 ==
               vpic->pic_fields.bits.reference_pic_flag);
    if (mslice->nal_ref_idc != 0) {
        if (vpic->pic_fields.bits.idr_pic_flag) {
            u(1, mslice_var(no_output_of_prior_pics_flag));
            u(1, mslice_var(long_term_reference_flag));
        } else {
            u(1, mslice_var(adaptive_ref_pic_marking_mode_flag));
            if (mslice->adaptive_ref_pic_marking_mode_flag) {
                av_assert0(0 && "MMCOs not supported");
            }
        }
    }

    if (vpic->pic_fields.bits.entropy_coding_mode_flag &&
        vslice->slice_type != SLICE_TYPE_I &&
        vslice->slice_type != SLICE_TYPE_SI) {
        ue(vslice_var(cabac_init_idc));
    }

    se(vslice_var(slice_qp_delta));
    if (vslice->slice_type == SLICE_TYPE_SP ||
        vslice->slice_type == SLICE_TYPE_SI) {
        if (vslice->slice_type == SLICE_TYPE_SP)
            u(1, mslice_var(sp_for_switch_flag));
        se(mslice_var(slice_qs_delta));
    }

    if (vpic->pic_fields.bits.deblocking_filter_control_present_flag) {
        ue(vslice_var(disable_deblocking_filter_idc));
        if (vslice->disable_deblocking_filter_idc != 1) {
            se(vslice_var(slice_alpha_c0_offset_div2));
            se(vslice_var(slice_beta_offset_div2));
        }
    }

    if (mseq->num_slice_groups_minus1 > 0 &&
        mseq->slice_group_map_type >= 3 && mseq->slice_group_map_type <= 5) {
        av_assert0(0 && "slice groups not supported");
    }

    // No alignment - this need not be a byte boundary.
}

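// Assemble the packed sequence header: the SPS followed by the PPS, each
// converted to Annex-B byte-stream form.  The lengths passed through this
// callback appear to be measured in bits, hence the bit_len/bit_pos
// bookkeeping below.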
static int vaapi_encode_h264_write_sequence_header(AVCodecContext *avctx,
                                                   char *data, size_t *data_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    PutBitContext pbc;
    char tmp[256];
    int err;
    size_t nal_len, bit_len, bit_pos, next_len;

    bit_len = *data_len;
    bit_pos = 0;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_sps(&pbc, ctx);
    nal_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    next_len = bit_len - bit_pos;
    err = ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data + bit_pos / 8,
                                                       &next_len,
                                                       tmp, nal_len);
    if (err < 0)
        return err;
    bit_pos += next_len;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_pps(&pbc, ctx);
    nal_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    next_len = bit_len - bit_pos;
    err = ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data + bit_pos / 8,
                                                       &next_len,
                                                       tmp, nal_len);
    if (err < 0)
        return err;
    bit_pos += next_len;

    *data_len = bit_pos;
    return 0;
}

static int vaapi_encode_h264_write_slice_header(AVCodecContext *avctx,
                                                VAAPIEncodePicture *pic,
                                                VAAPIEncodeSlice *slice,
                                                char *data, size_t *data_len)
{
    VAAPIEncodeContext *ctx = avctx->priv_data;
    PutBitContext pbc;
    char tmp[256];
    size_t header_len;

    init_put_bits(&pbc, tmp, sizeof(tmp));
    vaapi_encode_h264_write_slice_header2(&pbc, ctx, pic, slice);
    header_len = put_bits_count(&pbc);
    flush_put_bits(&pbc);

    return ff_vaapi_encode_h26x_nal_unit_to_byte_stream(data, data_len,
                                                        tmp, header_len);
}

static int vaapi_encode_h264_init_sequence_params(AVCodecContext *avctx)
{
    VAAPIEncodeContext                *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264  *vpic = ctx->codec_picture_params;
    VAAPIEncodeH264Context           *priv = ctx->priv_data;
    VAAPIEncodeH264MiscSequenceParams *mseq = &priv->misc_sequence_params;
    int i;

    {
        vseq->seq_parameter_set_id = 0;

        vseq->level_idc = avctx->level;

        vseq->max_num_ref_frames = 2;

        vseq->picture_width_in_mbs  = priv->mb_width;
        vseq->picture_height_in_mbs = priv->mb_height;

        vseq->seq_fields.bits.chroma_format_idc         = 1;
        vseq->seq_fields.bits.frame_mbs_only_flag       = 1;
        vseq->seq_fields.bits.direct_8x8_inference_flag = 1;
        vseq->seq_fields.bits.log2_max_frame_num_minus4 = 4;
        vseq->seq_fields.bits.pic_order_cnt_type        = 0;

        if (ctx->input_width  != ctx->aligned_width ||
            ctx->input_height != ctx->aligned_height) {
            vseq->frame_cropping_flag = 1;

            vseq->frame_crop_left_offset   = 0;
            vseq->frame_crop_right_offset  =
                (ctx->aligned_width - ctx->input_width) / 2;
            vseq->frame_crop_top_offset    = 0;
            vseq->frame_crop_bottom_offset =
                (ctx->aligned_height - ctx->input_height) / 2;
        } else {
            vseq->frame_cropping_flag = 0;
        }
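        // With chroma_format_idc = 1 (4:2:0) and frame_mbs_only_flag = 1 the
        // SPS crop offsets are in units of two luma samples, hence the
        // division by two above.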

        vseq->bits_per_second = avctx->bit_rate;
        if (avctx->framerate.num > 0 && avctx->framerate.den > 0) {
            // The VUI timing fields define the frame rate as
            // time_scale / (2 * num_units_in_tick), so the tick length is
            // the frame-rate denominator.
            vseq->num_units_in_tick = avctx->framerate.den;
            vseq->time_scale        = 2 * avctx->framerate.num;
        } else {
            vseq->num_units_in_tick = avctx->time_base.num;
            vseq->time_scale        = 2 * avctx->time_base.den;
        }

        vseq->intra_period     = ctx->p_per_i * (ctx->b_per_p + 1);
        vseq->intra_idr_period = vseq->intra_period;
        vseq->ip_period        = ctx->b_per_p + 1;
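        // intra_period above is the distance between intra frames (every one
        // of which is an IDR, since intra_idr_period matches it); ip_period
        // is the distance between anchor (I or P) frames, i.e. the number of
        // B frames per miniGOP plus one.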
    }

    {
        vpic->CurrPic.picture_id = VA_INVALID_ID;
        vpic->CurrPic.flags      = VA_PICTURE_H264_INVALID;

        for (i = 0; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
            vpic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
            vpic->ReferenceFrames[i].flags      = VA_PICTURE_H264_INVALID;
        }

        vpic->coded_buf = VA_INVALID_ID;

        vpic->pic_parameter_set_id = 0;
        vpic->seq_parameter_set_id = 0;

        vpic->num_ref_idx_l0_active_minus1 = 0;
        vpic->num_ref_idx_l1_active_minus1 = 0;

        vpic->pic_fields.bits.entropy_coding_mode_flag =
            ((avctx->profile & 0xff) != 66);
        vpic->pic_fields.bits.weighted_pred_flag  = 0;
        vpic->pic_fields.bits.weighted_bipred_idc = 0;
        vpic->pic_fields.bits.transform_8x8_mode_flag =
            ((avctx->profile & 0xff) >= 100);

        vpic->pic_init_qp = priv->fixed_qp_idr;
    }

    {
        mseq->profile_idc = avctx->profile & 0xff;

        if (avctx->profile & FF_PROFILE_H264_CONSTRAINED)
            mseq->constraint_set1_flag = 1;
        if (avctx->profile & FF_PROFILE_H264_INTRA)
            mseq->constraint_set3_flag = 1;
    }

    return 0;
}

static int vaapi_encode_h264_init_picture_params(AVCodecContext *avctx,
                                                 VAAPIEncodePicture *pic)
{
    VAAPIEncodeContext                *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264  *vpic = pic->codec_picture_params;
    VAAPIEncodeH264Context           *priv = ctx->priv_data;
    int i;

    if (pic->type == PICTURE_TYPE_IDR) {
        av_assert0(pic->display_order == pic->encode_order);
        priv->last_idr_frame = pic->display_order;
    } else {
        av_assert0(pic->display_order > priv->last_idr_frame);
    }

    vpic->frame_num = (pic->encode_order - priv->last_idr_frame) &
        ((1 << (4 + vseq->seq_fields.bits.log2_max_frame_num_minus4)) - 1);
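    // frame_num counts pictures in encoding order since the last IDR frame,
    // truncated to log2_max_frame_num bits as the slice header requires.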

    vpic->CurrPic.picture_id          = pic->recon_surface;
    vpic->CurrPic.frame_idx           = vpic->frame_num;
    vpic->CurrPic.flags               = 0;
    vpic->CurrPic.TopFieldOrderCnt    = pic->display_order;
    vpic->CurrPic.BottomFieldOrderCnt = pic->display_order;

    for (i = 0; i < pic->nb_refs; i++) {
        VAAPIEncodePicture *ref = pic->refs[i];
        av_assert0(ref && ref->encode_order >= priv->last_idr_frame);
        vpic->ReferenceFrames[i].picture_id = ref->recon_surface;
        vpic->ReferenceFrames[i].frame_idx  =
            ref->encode_order - priv->last_idr_frame;
        vpic->ReferenceFrames[i].flags = VA_PICTURE_H264_SHORT_TERM_REFERENCE;
        vpic->ReferenceFrames[i].TopFieldOrderCnt    = ref->display_order;
        vpic->ReferenceFrames[i].BottomFieldOrderCnt = ref->display_order;
    }
    for (; i < FF_ARRAY_ELEMS(vpic->ReferenceFrames); i++) {
        vpic->ReferenceFrames[i].picture_id = VA_INVALID_ID;
        vpic->ReferenceFrames[i].flags      = VA_PICTURE_H264_INVALID;
    }

    vpic->coded_buf = pic->output_buffer;

    vpic->pic_fields.bits.idr_pic_flag       = (pic->type == PICTURE_TYPE_IDR);
    vpic->pic_fields.bits.reference_pic_flag = (pic->type != PICTURE_TYPE_B);

    pic->nb_slices = 1;

    return 0;
}

static int vaapi_encode_h264_init_slice_params(AVCodecContext *avctx,
                                               VAAPIEncodePicture *pic,
                                               VAAPIEncodeSlice *slice)
{
    VAAPIEncodeContext                *ctx = avctx->priv_data;
    VAEncSequenceParameterBufferH264 *vseq = ctx->codec_sequence_params;
    VAEncPictureParameterBufferH264  *vpic = pic->codec_picture_params;
    VAEncSliceParameterBufferH264  *vslice = slice->codec_slice_params;
    VAAPIEncodeH264Context           *priv = ctx->priv_data;
    VAAPIEncodeH264Slice           *pslice;
    VAAPIEncodeH264MiscSliceParams *mslice;
    int i;

    slice->priv_data = av_mallocz(sizeof(*pslice));
    if (!slice->priv_data)
        return AVERROR(ENOMEM);
    pslice = slice->priv_data;
    mslice = &pslice->misc_slice_params;

    if (pic->type == PICTURE_TYPE_IDR)
        mslice->nal_unit_type = NAL_IDR_SLICE;
    else
        mslice->nal_unit_type = NAL_SLICE;

    switch (pic->type) {
    case PICTURE_TYPE_IDR:
        vslice->slice_type  = SLICE_TYPE_I;
        mslice->nal_ref_idc = 3;
        break;
    case PICTURE_TYPE_I:
        vslice->slice_type  = SLICE_TYPE_I;
        mslice->nal_ref_idc = 2;
        break;
    case PICTURE_TYPE_P:
        vslice->slice_type  = SLICE_TYPE_P;
        mslice->nal_ref_idc = 1;
        break;
    case PICTURE_TYPE_B:
        vslice->slice_type  = SLICE_TYPE_B;
        mslice->nal_ref_idc = 0;
        break;
    default:
        av_assert0(0 && "invalid picture type");
    }

    // Only one slice per frame.
    vslice->macroblock_address = 0;
    vslice->num_macroblocks    = priv->mb_width * priv->mb_height;

    vslice->macroblock_info = VA_INVALID_ID;

    vslice->pic_parameter_set_id = vpic->pic_parameter_set_id;
    vslice->idr_pic_id = priv->idr_pic_count++;

    vslice->pic_order_cnt_lsb = pic->display_order &
        ((1 << (4 + vseq->seq_fields.bits.log2_max_pic_order_cnt_lsb_minus4)) - 1);
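    // pic_order_cnt_lsb carries the display order truncated to
    // MaxPicOrderCntLsb, which is what the decoder uses to put reordered
    // (B) frames back into output order.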

    for (i = 0; i < FF_ARRAY_ELEMS(vslice->RefPicList0); i++) {
        vslice->RefPicList0[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList0[i].flags      = VA_PICTURE_H264_INVALID;
        vslice->RefPicList1[i].picture_id = VA_INVALID_ID;
        vslice->RefPicList1[i].flags      = VA_PICTURE_H264_INVALID;
    }

    av_assert0(pic->nb_refs <= 2);
    if (pic->nb_refs >= 1) {
        // Backward reference for P or B frame.
        av_assert0(pic->type == PICTURE_TYPE_P ||
                   pic->type == PICTURE_TYPE_B);

        vslice->num_ref_idx_l0_active_minus1 = 0;
        vslice->RefPicList0[0] = vpic->ReferenceFrames[0];
    }
    if (pic->nb_refs >= 2) {
        // Forward reference for B frame.
        av_assert0(pic->type == PICTURE_TYPE_B);

        vslice->num_ref_idx_l1_active_minus1 = 0;
        vslice->RefPicList1[0] = vpic->ReferenceFrames[1];
    }

    if (pic->type == PICTURE_TYPE_B)
        vslice->slice_qp_delta = priv->fixed_qp_b - vpic->pic_init_qp;
    else if (pic->type == PICTURE_TYPE_P)
        vslice->slice_qp_delta = priv->fixed_qp_p - vpic->pic_init_qp;
    else
        vslice->slice_qp_delta = priv->fixed_qp_idr - vpic->pic_init_qp;
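    // The effective slice QP is pic_init_qp + slice_qp_delta, so the deltas
    // above select the fixed QP configured for the current picture type.
    // In CBR mode all three fixed_qp values are equal, making the delta
    // zero and presumably leaving the actual QP choice to the driver's
    // rate control.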

    vslice->direct_spatial_mv_pred_flag = 1;

    return 0;
}

static av_cold int vaapi_encode_h264_init_constant_bitrate(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int hrd_buffer_size;
    int hrd_initial_buffer_fullness;

    if (avctx->rc_buffer_size)
        hrd_buffer_size = avctx->rc_buffer_size;
    else
        hrd_buffer_size = avctx->bit_rate;
    if (avctx->rc_initial_buffer_occupancy)
        hrd_initial_buffer_fullness = avctx->rc_initial_buffer_occupancy;
    else
        hrd_initial_buffer_fullness = hrd_buffer_size * 3 / 4;
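    // Without explicit options the HRD buffer defaults to one second's worth
    // of data at the target bitrate, starting three-quarters full.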

    priv->rc_params.misc.type = VAEncMiscParameterTypeRateControl;
    priv->rc_params.rc = (VAEncMiscParameterRateControl) {
        .bits_per_second   = avctx->bit_rate,
        .target_percentage = 66,
        .window_size       = 1000,
        .initial_qp        = (avctx->qmax >= 0 ? avctx->qmax : 40),
        .min_qp            = (avctx->qmin >= 0 ? avctx->qmin : 18),
        .basic_unit_size   = 0,
    };
    ctx->global_params[ctx->nb_global_params] =
        &priv->rc_params.misc;
    ctx->global_params_size[ctx->nb_global_params++] =
        sizeof(priv->rc_params);

    priv->hrd_params.misc.type = VAEncMiscParameterTypeHRD;
    priv->hrd_params.hrd = (VAEncMiscParameterHRD) {
        .initial_buffer_fullness = hrd_initial_buffer_fullness,
        .buffer_size             = hrd_buffer_size,
    };
    ctx->global_params[ctx->nb_global_params] =
        &priv->hrd_params.misc;
    ctx->global_params_size[ctx->nb_global_params++] =
        sizeof(priv->hrd_params);

    // These still need to be set for pic_init_qp/slice_qp_delta.
    priv->fixed_qp_idr = 26;
    priv->fixed_qp_p   = 26;
    priv->fixed_qp_b   = 26;

    av_log(avctx, AV_LOG_DEBUG, "Using constant-bitrate = %d bps.\n",
           avctx->bit_rate);
    return 0;
}

static av_cold int vaapi_encode_h264_init_fixed_qp(AVCodecContext *avctx)
{
    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;

    priv->fixed_qp_p = avctx->global_quality;
    if (avctx->i_quant_factor > 0.0)
        priv->fixed_qp_idr = (int)((priv->fixed_qp_p * avctx->i_quant_factor +
                                    avctx->i_quant_offset) + 0.5);
    else
        priv->fixed_qp_idr = priv->fixed_qp_p;
    if (avctx->b_quant_factor > 0.0)
        priv->fixed_qp_b = (int)((priv->fixed_qp_p * avctx->b_quant_factor +
                                  avctx->b_quant_offset) + 0.5);
    else
        priv->fixed_qp_b = priv->fixed_qp_p;
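    // The P-frame QP comes straight from global_quality; the IDR and B QPs
    // are derived from it via the i_qfactor/i_qoffset and b_qfactor/b_qoffset
    // options, rounded to the nearest integer.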

    av_log(avctx, AV_LOG_DEBUG, "Using fixed QP = "
           "%d / %d / %d for IDR / P / B frames.\n",
           priv->fixed_qp_idr, priv->fixed_qp_p, priv->fixed_qp_b);
    return 0;
}

static av_cold int vaapi_encode_h264_init_internal(AVCodecContext *avctx)
{
    static const VAConfigAttrib default_config_attributes[] = {
        { .type  = VAConfigAttribRTFormat,
          .value = VA_RT_FORMAT_YUV420 },
        { .type  = VAConfigAttribEncPackedHeaders,
          .value = (VA_ENC_PACKED_HEADER_SEQUENCE |
                    VA_ENC_PACKED_HEADER_SLICE) },
    };

    VAAPIEncodeContext      *ctx = avctx->priv_data;
    VAAPIEncodeH264Context *priv = ctx->priv_data;
    int i, err;

    switch (avctx->profile) {
    case FF_PROFILE_H264_CONSTRAINED_BASELINE:
        ctx->va_profile = VAProfileH264ConstrainedBaseline;
        break;
    case FF_PROFILE_H264_BASELINE:
        ctx->va_profile = VAProfileH264Baseline;
        break;
    case FF_PROFILE_H264_MAIN:
        ctx->va_profile = VAProfileH264Main;
        break;
    case FF_PROFILE_H264_EXTENDED:
        av_log(avctx, AV_LOG_ERROR, "H.264 extended profile "
               "is not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_UNKNOWN:
    case FF_PROFILE_H264_HIGH:
        ctx->va_profile = VAProfileH264High;
        break;
    case FF_PROFILE_H264_HIGH_10:
    case FF_PROFILE_H264_HIGH_10_INTRA:
        av_log(avctx, AV_LOG_ERROR, "H.264 10-bit profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    case FF_PROFILE_H264_HIGH_422:
    case FF_PROFILE_H264_HIGH_422_INTRA:
    case FF_PROFILE_H264_HIGH_444:
    case FF_PROFILE_H264_HIGH_444_PREDICTIVE:
    case FF_PROFILE_H264_HIGH_444_INTRA:
    case FF_PROFILE_H264_CAVLC_444:
        av_log(avctx, AV_LOG_ERROR, "H.264 non-4:2:0 profiles "
               "are not supported.\n");
        return AVERROR_PATCHWELCOME;
    default:
        av_log(avctx, AV_LOG_ERROR, "Unknown H.264 profile %d.\n",
               avctx->profile);
        return AVERROR(EINVAL);
    }
    ctx->va_entrypoint = VAEntrypointEncSlice;

    ctx->input_width    = avctx->width;
    ctx->input_height   = avctx->height;
    ctx->aligned_width  = FFALIGN(ctx->input_width,  16);
    ctx->aligned_height = FFALIGN(ctx->input_height, 16);
    priv->mb_width  = ctx->aligned_width  / 16;
    priv->mb_height = ctx->aligned_height / 16;
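    // H.264 codes the picture in 16x16 macroblocks, so the coded size is
    // rounded up here and the difference is signalled through the SPS frame
    // cropping rectangle set up in init_sequence_params().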

    for (i = 0; i < FF_ARRAY_ELEMS(default_config_attributes); i++) {
        ctx->config_attributes[ctx->nb_config_attributes++] =
            default_config_attributes[i];
    }

    if (avctx->bit_rate > 0) {
        ctx->va_rc_mode = VA_RC_CBR;
        err = vaapi_encode_h264_init_constant_bitrate(avctx);
    } else {
        ctx->va_rc_mode = VA_RC_CQP;
        err = vaapi_encode_h264_init_fixed_qp(avctx);
    }
    if (err < 0)
        return err;

    ctx->config_attributes[ctx->nb_config_attributes++] = (VAConfigAttrib) {
        .type  = VAConfigAttribRateControl,
        .value = ctx->va_rc_mode,
    };

    ctx->nb_recon_frames = 20;

    return 0;
}

static VAAPIEncodeType vaapi_encode_type_h264 = {
    .priv_data_size        = sizeof(VAAPIEncodeH264Context),

    .init                  = &vaapi_encode_h264_init_internal,

    .sequence_params_size  = sizeof(VAEncSequenceParameterBufferH264),
    .init_sequence_params  = &vaapi_encode_h264_init_sequence_params,

    .picture_params_size   = sizeof(VAEncPictureParameterBufferH264),
    .init_picture_params   = &vaapi_encode_h264_init_picture_params,

    .slice_params_size     = sizeof(VAEncSliceParameterBufferH264),
    .init_slice_params     = &vaapi_encode_h264_init_slice_params,

    .sequence_header_type  = VAEncPackedHeaderSequence,
    .write_sequence_header = &vaapi_encode_h264_write_sequence_header,

    .slice_header_type     = VAEncPackedHeaderH264_Slice,
    .write_slice_header    = &vaapi_encode_h264_write_slice_header,
};

static av_cold int vaapi_encode_h264_init(AVCodecContext *avctx)
{
    return ff_vaapi_encode_init(avctx, &vaapi_encode_type_h264);
}

static const AVCodecDefault vaapi_encode_h264_defaults[] = {
    { "profile",        "100" },
    { "level",          "51"  },
    { "b",              "0"   },
    { "bf",             "2"   },
    { "g",              "120" },
    { "global_quality", "20"  },
    { "i_qfactor",      "1.0" },
    { "i_qoffset",      "0.0" },
    { "b_qfactor",      "1.2" },
    { "b_qoffset",      "0.0" },
    { NULL },
};

static const AVClass vaapi_encode_h264_class = {
    .class_name = "h264_vaapi",
    .item_name  = av_default_item_name,
    .version    = LIBAVUTIL_VERSION_INT,
};

AVCodec ff_h264_vaapi_encoder = {
    .name           = "h264_vaapi",
    .long_name      = NULL_IF_CONFIG_SMALL("H.264/AVC (VAAPI)"),
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_H264,
    .priv_data_size = sizeof(VAAPIEncodeContext),
    .init           = &vaapi_encode_h264_init,
    .encode2        = &ff_vaapi_encode2,
    .close          = &ff_vaapi_encode_close,
    .priv_class     = &vaapi_encode_h264_class,
    .capabilities   = AV_CODEC_CAP_DELAY,
    .defaults       = vaapi_encode_h264_defaults,
    .pix_fmts = (const enum AVPixelFormat[]) {
        AV_PIX_FMT_VAAPI,
        AV_PIX_FMT_NONE,
    },
};