1 /*
2 * H.26L/H.264/AVC/JVT/14496-10/... encoder/decoder
3 * Copyright (c) 2003 Michael Niedermayer <michaelni@gmx.at>
4 *
5 * This file is part of Libav.
6 *
7 * Libav is free software; you can redistribute it and/or
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
10 * version 2.1 of the License, or (at your option) any later version.
11 *
12 * Libav is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
18 * License along with Libav; if not, write to the Free Software
19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20 */
21
22 /**
23 * @file
24 * H.264 / AVC / MPEG-4 Part 10 codec.
25 * @author Michael Niedermayer <michaelni@gmx.at>
26 */
27
28 #ifndef AVCODEC_H264_H
29 #define AVCODEC_H264_H
30
31 #include "libavutil/intreadwrite.h"
32 #include "libavutil/thread.h"
33 #include "cabac.h"
34 #include "error_resilience.h"
35 #include "get_bits.h"
36 #include "h264chroma.h"
37 #include "h264dsp.h"
38 #include "h264pred.h"
39 #include "h264qpel.h"
40 #include "internal.h"
41 #include "mpegutils.h"
42 #include "parser.h"
43 #include "qpeldsp.h"
44 #include "rectangle.h"
45 #include "videodsp.h"
46
47 #define H264_MAX_PICTURE_COUNT 32
48 #define H264_MAX_THREADS 16
49
50 #define MAX_SPS_COUNT 32
51 #define MAX_PPS_COUNT 256
52
53 #define MAX_MMCO_COUNT 66
54
55 #define MAX_DELAYED_PIC_COUNT 16
56
57 /* Compiling in interlaced support reduces the speed
58 * of progressive decoding by about 2%. */
59 #define ALLOW_INTERLACE
60
61 #define FMO 0
62
63 /**
64 * The maximum number of slices supported by the decoder.
65 * Must be a power of 2.
66 */
67 #define MAX_SLICES 32
68
69 #ifdef ALLOW_INTERLACE
70 #define MB_MBAFF(h) h->mb_mbaff
71 #define MB_FIELD(h) h->mb_field_decoding_flag
72 #define FRAME_MBAFF(h) h->mb_aff_frame
73 #define FIELD_PICTURE(h) (h->picture_structure != PICT_FRAME)
74 #define LEFT_MBS 2
75 #define LTOP 0
76 #define LBOT 1
77 #define LEFT(i) (i)
78 #else
79 #define MB_MBAFF(h) 0
80 #define MB_FIELD(h) 0
81 #define FRAME_MBAFF(h) 0
82 #define FIELD_PICTURE(h) 0
83 #undef IS_INTERLACED
84 #define IS_INTERLACED(mb_type) 0
85 #define LEFT_MBS 1
86 #define LTOP 0
87 #define LBOT 0
88 #define LEFT(i) 0
89 #endif
90 #define FIELD_OR_MBAFF_PICTURE(h) (FRAME_MBAFF(h) || FIELD_PICTURE(h))
91
92 #ifndef CABAC
93 #define CABAC(h) h->pps.cabac
94 #endif
95
96 #define CHROMA422(h) (h->sps.chroma_format_idc == 2)
97 #define CHROMA444(h) (h->sps.chroma_format_idc == 3)
98
99 #define EXTENDED_SAR 255
100
101 #define MB_TYPE_REF0 MB_TYPE_ACPRED // dirty, but it fits in 16 bits
102 #define MB_TYPE_8x8DCT 0x01000000
103 #define IS_REF0(a) ((a) & MB_TYPE_REF0)
104 #define IS_8x8DCT(a) ((a) & MB_TYPE_8x8DCT)
105
106 #define QP_MAX_NUM (51 + 2 * 6) // The maximum supported QP
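/* The value above is the 8-bit QP ceiling of 51 extended by 6 per extra bit of
 * luma depth (QpBdOffsetY), so 51 + 2 * 6 = 63 covers the 8-, 9- and 10-bit
 * content this decoder supports. */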
107
108 /* NAL unit types */
109 enum {
110 NAL_SLICE = 1,
111 NAL_DPA = 2,
112 NAL_DPB = 3,
113 NAL_DPC = 4,
114 NAL_IDR_SLICE = 5,
115 NAL_SEI = 6,
116 NAL_SPS = 7,
117 NAL_PPS = 8,
118 NAL_AUD = 9,
119 NAL_END_SEQUENCE = 10,
120 NAL_END_STREAM = 11,
121 NAL_FILLER_DATA = 12,
122 NAL_SPS_EXT = 13,
123 NAL_AUXILIARY_SLICE = 19,
124 NAL_FF_IGNORE = 0xff0f001,
125 };
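/* The values above come from the 5-bit nal_unit_type field of the one-byte NAL
 * header. An illustrative way to pull the header fields apart, assuming b holds
 * the first byte after the start code (or length prefix):
 *
 *     int forbidden_zero_bit = b >> 7;          // must be 0 in a valid stream
 *     int nal_ref_idc        = (b >> 5) & 0x3;  // 0 -> not used for reference
 *     int nal_unit_type      = b & 0x1f;        // one of the NAL_* values above
 */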
126
127 /**
128 * SEI message types
129 */
130 typedef enum {
131 SEI_TYPE_BUFFERING_PERIOD = 0, ///< buffering period (H.264, D.1.1)
132 SEI_TYPE_PIC_TIMING = 1, ///< picture timing
133 SEI_TYPE_USER_DATA_REGISTERED = 4, ///< registered user data as specified by Rec. ITU-T T.35
134 SEI_TYPE_USER_DATA_UNREGISTERED = 5, ///< unregistered user data
135 SEI_TYPE_RECOVERY_POINT = 6, ///< recovery point (frame # to decoder sync)
136 SEI_TYPE_FRAME_PACKING = 45, ///< frame packing arrangement
137 SEI_TYPE_DISPLAY_ORIENTATION = 47, ///< display orientation
138 } SEI_Type;
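/* SEI payloadType (and payloadSize) are coded as a run of 0xFF bytes, each
 * adding 255, followed by a final byte that is added as well. A minimal sketch
 * of reading the type with the GetBitContext API used in this decoder:
 *
 *     int type = 0, byte;
 *     while ((byte = get_bits(gb, 8)) == 0xFF)
 *         type += 0xFF;
 *     type += byte;
 */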
139
140 /**
141 * pic_struct in picture timing SEI message
142 */
143 typedef enum {
144 SEI_PIC_STRUCT_FRAME = 0, ///< 0: %frame
145 SEI_PIC_STRUCT_TOP_FIELD = 1, ///< 1: top field
146 SEI_PIC_STRUCT_BOTTOM_FIELD = 2, ///< 2: bottom field
147 SEI_PIC_STRUCT_TOP_BOTTOM = 3, ///< 3: top field, bottom field, in that order
148 SEI_PIC_STRUCT_BOTTOM_TOP = 4, ///< 4: bottom field, top field, in that order
149 SEI_PIC_STRUCT_TOP_BOTTOM_TOP = 5, ///< 5: top field, bottom field, top field repeated, in that order
150 SEI_PIC_STRUCT_BOTTOM_TOP_BOTTOM = 6, ///< 6: bottom field, top field, bottom field repeated, in that order
151 SEI_PIC_STRUCT_FRAME_DOUBLING = 7, ///< 7: %frame doubling
152 SEI_PIC_STRUCT_FRAME_TRIPLING = 8 ///< 8: %frame tripling
153 } SEI_PicStructType;
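/* For orientation, the number of clock timestamps signalled per pic_struct
 * value in the picture timing SEI is, per the H.264 picture timing semantics:
 * FRAME, TOP_FIELD, BOTTOM_FIELD -> 1; TOP_BOTTOM, BOTTOM_TOP,
 * FRAME_DOUBLING -> 2; TOP_BOTTOM_TOP, BOTTOM_TOP_BOTTOM, FRAME_TRIPLING -> 3. */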
154
155 /**
156 * Sequence parameter set
157 */
158 typedef struct SPS {
159 unsigned int sps_id;
160 int profile_idc;
161 int level_idc;
162 int chroma_format_idc;
163 int transform_bypass; ///< qpprime_y_zero_transform_bypass_flag
164 int log2_max_frame_num; ///< log2_max_frame_num_minus4 + 4
165 int poc_type; ///< pic_order_cnt_type
166 int log2_max_poc_lsb; ///< log2_max_pic_order_cnt_lsb_minus4
167 int delta_pic_order_always_zero_flag;
168 int offset_for_non_ref_pic;
169 int offset_for_top_to_bottom_field;
170 int poc_cycle_length; ///< num_ref_frames_in_pic_order_cnt_cycle
171 int ref_frame_count; ///< num_ref_frames
172 int gaps_in_frame_num_allowed_flag;
173 int mb_width; ///< pic_width_in_mbs_minus1 + 1
174 int mb_height; ///< pic_height_in_map_units_minus1 + 1
175 int frame_mbs_only_flag;
176 int mb_aff; ///< mb_adaptive_frame_field_flag
177 int direct_8x8_inference_flag;
178 int crop; ///< frame_cropping_flag
179
180 /* those 4 are already in luma samples */
181 unsigned int crop_left; ///< frame_cropping_rect_left_offset
182 unsigned int crop_right; ///< frame_cropping_rect_right_offset
183 unsigned int crop_top; ///< frame_cropping_rect_top_offset
184 unsigned int crop_bottom; ///< frame_cropping_rect_bottom_offset
185 int vui_parameters_present_flag;
186 AVRational sar;
187 int video_signal_type_present_flag;
188 int full_range;
189 int colour_description_present_flag;
190 enum AVColorPrimaries color_primaries;
191 enum AVColorTransferCharacteristic color_trc;
192 enum AVColorSpace colorspace;
193 int timing_info_present_flag;
194 uint32_t num_units_in_tick;
195 uint32_t time_scale;
196 int fixed_frame_rate_flag;
197 short offset_for_ref_frame[256]; // FIXME dynamic alloc?
198 int bitstream_restriction_flag;
199 int num_reorder_frames;
200 int scaling_matrix_present;
201 uint8_t scaling_matrix4[6][16];
202 uint8_t scaling_matrix8[6][64];
203 int nal_hrd_parameters_present_flag;
204 int vcl_hrd_parameters_present_flag;
205 int pic_struct_present_flag;
206 int time_offset_length;
207 int cpb_cnt; ///< See H.264 E.1.2
208 int initial_cpb_removal_delay_length; ///< initial_cpb_removal_delay_length_minus1 + 1
209 int cpb_removal_delay_length; ///< cpb_removal_delay_length_minus1 + 1
210 int dpb_output_delay_length; ///< dpb_output_delay_length_minus1 + 1
211 int bit_depth_luma; ///< bit_depth_luma_minus8 + 8
212 int bit_depth_chroma; ///< bit_depth_chroma_minus8 + 8
213 int residual_color_transform_flag; ///< residual_colour_transform_flag
214 int constraint_set_flags; ///< constraint_set[0-3]_flag
215 int new; ///< flag indicating whether the decoder context needs to be reinitialized due to a changed SPS
216 } SPS;
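/* A rough sketch of how the coded and displayed dimensions follow from the
 * fields above, given their documented meanings (illustrative only; the
 * decoder derives the real values elsewhere and the crop offsets are already
 * in luma samples):
 *
 *     int width  = sps->mb_width  * 16;
 *     int height = sps->mb_height * 16 * (2 - sps->frame_mbs_only_flag);
 *     int out_w  = width  - (sps->crop_left + sps->crop_right);
 *     int out_h  = height - (sps->crop_top  + sps->crop_bottom);
 */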
217
218 /**
219 * Picture parameter set
220 */
221 typedef struct PPS {
222 unsigned int sps_id;
223 int cabac; ///< entropy_coding_mode_flag
224 int pic_order_present; ///< pic_order_present_flag
225 int slice_group_count; ///< num_slice_groups_minus1 + 1
226 int mb_slice_group_map_type;
227 unsigned int ref_count[2]; ///< num_ref_idx_l0/1_active_minus1 + 1
228 int weighted_pred; ///< weighted_pred_flag
229 int weighted_bipred_idc;
230 int init_qp; ///< pic_init_qp_minus26 + 26
231 int init_qs; ///< pic_init_qs_minus26 + 26
232 int chroma_qp_index_offset[2];
233 int deblocking_filter_parameters_present; ///< deblocking_filter_parameters_present_flag
234 int constrained_intra_pred; ///< constrained_intra_pred_flag
235 int redundant_pic_cnt_present; ///< redundant_pic_cnt_present_flag
236 int transform_8x8_mode; ///< transform_8x8_mode_flag
237 uint8_t scaling_matrix4[6][16];
238 uint8_t scaling_matrix8[6][64];
239 uint8_t chroma_qp_table[2][64]; ///< pre-scaled (with chroma_qp_index_offset) version of qp_table
240 int chroma_qp_diff;
241 } PPS;
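/* The "pre-scaled" chroma_qp_table above conceptually folds
 * chroma_qp_index_offset into the standard luma-to-chroma QP mapping, roughly
 * as below (illustrative only; i is 0/1 for Cb/Cr and max_qp depends on the
 * bit depth, 51 + 6 * (bit_depth_luma - 8)):
 *
 *     for (int q = 0; q <= max_qp; q++)
 *         pps->chroma_qp_table[i][q] =
 *             ff_h264_chroma_qp[bit_depth_luma - 8]
 *                              [av_clip(q + pps->chroma_qp_index_offset[i], 0, max_qp)];
 */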
242
243 /**
244 * Memory management control operation opcode.
245 */
246 typedef enum MMCOOpcode {
247 MMCO_END = 0,
248 MMCO_SHORT2UNUSED,
249 MMCO_LONG2UNUSED,
250 MMCO_SHORT2LONG,
251 MMCO_SET_MAX_LONG,
252 MMCO_RESET,
253 MMCO_LONG,
254 } MMCOOpcode;
255
256 /**
257 * Memory management control operation.
258 */
259 typedef struct MMCO {
260 MMCOOpcode opcode;
261 int short_pic_num; ///< pic_num without wrapping (pic_num & max_pic_num)
262 int long_arg; ///< index, pic_num, or num long refs depending on opcode
263 } MMCO;
264
265 typedef struct H264Picture {
266 AVFrame *f;
267 ThreadFrame tf;
268
269 AVBufferRef *qscale_table_buf;
270 int8_t *qscale_table;
271
272 AVBufferRef *motion_val_buf[2];
273 int16_t (*motion_val[2])[2];
274
275 AVBufferRef *mb_type_buf;
276 uint32_t *mb_type;
277
278 AVBufferRef *hwaccel_priv_buf;
279 void *hwaccel_picture_private; ///< hardware accelerator private data
280
281 AVBufferRef *ref_index_buf[2];
282 int8_t *ref_index[2];
283
284 int field_poc[2]; ///< top/bottom POC
285 int poc; ///< frame POC
286 int frame_num; ///< frame_num (raw frame_num from slice header)
287 int mmco_reset; /**< MMCO_RESET sets this to 1. Reordering code must
288 not mix pictures before and after MMCO_RESET. */
289 int pic_id; /**< pic_num (short -> no wrap version of pic_num,
290 pic_num & max_pic_num; long -> long_pic_num) */
291 int long_ref; ///< 1 -> long-term reference, 0 -> short-term reference
292 int ref_poc[2][2][32]; ///< POCs of the frames used as reference (FIXME need per slice)
293 int ref_count[2][2]; ///< number of entries in ref_poc (FIXME need per slice)
294 int mbaff; ///< 1 -> MBAFF frame, 0 -> not MBAFF
295 int field_picture; ///< whether or not picture was encoded in separate fields
296
297 int reference;
298 int recovered; ///< picture at IDR or recovery point + recovery count
299 } H264Picture;
300
301 typedef struct H264Ref {
302 uint8_t *data[3];
303 int linesize[3];
304
305 int reference;
306 int poc;
307 int pic_id;
308
309 H264Picture *parent;
310 } H264Ref;
311
312 typedef struct H264SliceContext {
313 struct H264Context *h264;
314 GetBitContext gb;
315 ERContext er;
316
317 int slice_num;
318 int slice_type;
319 int slice_type_nos; ///< S-free slice type (SI/SP are remapped to I/P)
320 int slice_type_fixed;
321
322 int qscale;
323 int chroma_qp[2]; // QPc
324 int qp_thresh; ///< QP threshold to skip the loop filter
325 int last_qscale_diff;
326
327 // deblock
328 int deblocking_filter; ///< disable_deblocking_filter_idc with the values 0 and 1 swapped
329 int slice_alpha_c0_offset;
330 int slice_beta_offset;
331
332 // Weighted pred stuff
333 int use_weight;
334 int use_weight_chroma;
335 int luma_log2_weight_denom;
336 int chroma_log2_weight_denom;
337 int luma_weight_flag[2]; ///< 7.4.3.2 luma_weight_lX_flag
338 int chroma_weight_flag[2]; ///< 7.4.3.2 chroma_weight_lX_flag
339 // The following 2 can be changed to int8_t, but that causes a slowdown of about 10 CPU cycles
340 int luma_weight[48][2][2];
341 int chroma_weight[48][2][2][2];
342 int implicit_weight[48][48][2];
343
344 int prev_mb_skipped;
345 int next_mb_skipped;
346
347 int chroma_pred_mode;
348 int intra16x16_pred_mode;
349
350 int8_t intra4x4_pred_mode_cache[5 * 8];
351 int8_t(*intra4x4_pred_mode);
352
353 int topleft_mb_xy;
354 int top_mb_xy;
355 int topright_mb_xy;
356 int left_mb_xy[LEFT_MBS];
357
358 int topleft_type;
359 int top_type;
360 int topright_type;
361 int left_type[LEFT_MBS];
362
363 const uint8_t *left_block;
364 int topleft_partition;
365
366 unsigned int topleft_samples_available;
367 unsigned int top_samples_available;
368 unsigned int topright_samples_available;
369 unsigned int left_samples_available;
370
371 ptrdiff_t linesize, uvlinesize;
372 ptrdiff_t mb_linesize; ///< may be equal to linesize or linesize * 2, for MBAFF
373 ptrdiff_t mb_uvlinesize;
374
375 int mb_x, mb_y;
376 int mb_xy;
377 int resync_mb_x;
378 int resync_mb_y;
379 // index of the first MB of the next slice
380 int next_slice_idx;
381 int mb_skip_run;
382 int is_complex;
383
384 int mb_field_decoding_flag;
385 int mb_mbaff; ///< mb_aff_frame && mb_field_decoding_flag
386
387 int redundant_pic_count;
388
389 /**
390 * number of neighbors (top and/or left) that used 8x8 dct
391 */
392 int neighbor_transform_size;
393
394 int direct_spatial_mv_pred;
395 int col_parity;
396 int col_fieldoff;
397
398 int cbp;
399 int top_cbp;
400 int left_cbp;
401
402 int dist_scale_factor[32];
403 int dist_scale_factor_field[2][32];
404 int map_col_to_list0[2][16 + 32];
405 int map_col_to_list0_field[2][2][16 + 32];
406
407 /**
408 * num_ref_idx_l0/1_active_minus1 + 1
409 */
410 unsigned int ref_count[2]; ///< counts frames or fields, depending on current mb mode
411 unsigned int list_count;
412 H264Ref ref_list[2][48]; /**< 0..15: frame refs, 16..47: mbaff field refs.
413 * Reordered version of default_ref_list
414 * according to picture reordering in slice header */
415 int ref2frm[MAX_SLICES][2][64]; ///< reference-to-frame-number lists, used in the loop filter; the first 2 entries are for -2 and -1
416
417 const uint8_t *intra_pcm_ptr;
418 int16_t *dc_val_base;
419
420 uint8_t *bipred_scratchpad;
421 uint8_t *edge_emu_buffer;
422 uint8_t (*top_borders[2])[(16 * 3) * 2];
423 int bipred_scratchpad_allocated;
424 int edge_emu_buffer_allocated;
425 int top_borders_allocated[2];
426
427 /**
428 * Non-zero coefficient count cache.
429 * An entry is 64 if not available.
430 */
431 DECLARE_ALIGNED(8, uint8_t, non_zero_count_cache)[15 * 8];
432
433 /**
434 * Motion vector cache.
435 */
436 DECLARE_ALIGNED(16, int16_t, mv_cache)[2][5 * 8][2];
437 DECLARE_ALIGNED(8, int8_t, ref_cache)[2][5 * 8];
438 DECLARE_ALIGNED(16, uint8_t, mvd_cache)[2][5 * 8][2];
439 uint8_t direct_cache[5 * 8];
440
441 DECLARE_ALIGNED(8, uint16_t, sub_mb_type)[4];
442
443 ///< As a DCT coefficient is int32_t at high bit depth, we need to reserve twice the space.
444 DECLARE_ALIGNED(16, int16_t, mb)[16 * 48 * 2];
445 DECLARE_ALIGNED(16, int16_t, mb_luma_dc)[3][16 * 2];
446 ///< As mb is addressed by scantable[i] and scantable is uint8_t, we can either
447 ///< check that i is not too large or ensure that there is some unused space after mb.
448 int16_t mb_padding[256 * 2];
449
450 uint8_t (*mvd_table[2])[2];
451
452 /**
453 * Cabac
454 */
455 CABACContext cabac;
456 uint8_t cabac_state[1024];
457 int cabac_init_idc;
458
459 // rbsp buffer used for this slice
460 uint8_t *rbsp_buffer;
461 unsigned int rbsp_buffer_size;
462 } H264SliceContext;
463
464 /**
465 * H264Context
466 */
467 typedef struct H264Context {
468 AVCodecContext *avctx;
469 VideoDSPContext vdsp;
470 H264DSPContext h264dsp;
471 H264ChromaContext h264chroma;
472 H264QpelContext h264qpel;
473 GetBitContext gb;
474
475 H264Picture DPB[H264_MAX_PICTURE_COUNT];
476 H264Picture *cur_pic_ptr;
477 H264Picture cur_pic;
478
479 H264SliceContext *slice_ctx;
480 int nb_slice_ctx;
481
482 int pixel_shift; ///< 0 for 8-bit H264, 1 for high-bit-depth H264
483
484 /* coded dimensions -- 16 * mb w/h */
485 int width, height;
486 int chroma_x_shift, chroma_y_shift;
487
488 int droppable;
489 int coded_picture_number;
490 int low_delay;
491
492 int context_initialized;
493 int flags;
494 int workaround_bugs;
495
496 int8_t(*intra4x4_pred_mode);
497 H264PredContext hpc;
498
499 uint8_t (*non_zero_count)[48];
500
501 #define LIST_NOT_USED -1 // FIXME rename?
502 #define PART_NOT_AVAILABLE -2
503
504 /**
505 * block_offset[ 0..23] for frame macroblocks
506 * block_offset[24..47] for field macroblocks
507 */
508 int block_offset[2 * (16 * 3)];
509
510 uint32_t *mb2b_xy; // FIXME are these 4 a good idea?
511 uint32_t *mb2br_xy;
512 int b_stride; // FIXME use s->b4_stride
513
514 SPS sps; ///< current sps
515 PPS pps; ///< current pps
516
517 uint32_t dequant4_buffer[6][QP_MAX_NUM + 1][16]; // FIXME should these be moved down?
518 uint32_t dequant8_buffer[6][QP_MAX_NUM + 1][64];
519 uint32_t(*dequant4_coeff[6])[16];
520 uint32_t(*dequant8_coeff[6])[64];
521
522 uint16_t *slice_table; ///< slice_table_base + 2*mb_stride + 1
523
524 // interlacing specific flags
525 int mb_aff_frame;
526 int picture_structure;
527 int first_field;
528
529 uint8_t *list_counts; ///< Array of list_count per MB specifying the slice type
530
531 /* 0x100 -> non null luma_dc, 0x80/0x40 -> non null chroma_dc (cb/cr), 0x?0 -> chroma_cbp(0, 1, 2), 0x0? luma_cbp */
532 uint16_t *cbp_table;
533
534 /* chroma_pred_mode for i4x4 or i16x16, else 0 */
535 uint8_t *chroma_pred_mode_table;
536 uint8_t (*mvd_table[2])[2];
537 uint8_t *direct_table;
538
539 uint8_t zigzag_scan[16];
540 uint8_t zigzag_scan8x8[64];
541 uint8_t zigzag_scan8x8_cavlc[64];
542 uint8_t field_scan[16];
543 uint8_t field_scan8x8[64];
544 uint8_t field_scan8x8_cavlc[64];
545 const uint8_t *zigzag_scan_q0;
546 const uint8_t *zigzag_scan8x8_q0;
547 const uint8_t *zigzag_scan8x8_cavlc_q0;
548 const uint8_t *field_scan_q0;
549 const uint8_t *field_scan8x8_q0;
550 const uint8_t *field_scan8x8_cavlc_q0;
551
552 int x264_build;
553
554 int mb_y;
555 int mb_height, mb_width;
556 int mb_stride;
557 int mb_num;
558
559 // =============================================================
560 // Things below are not used in the MB or more inner code
561
562 int nal_ref_idc;
563 int nal_unit_type;
564
565 /**
566 * Used to parse AVC variant of h264
567 */
568 int is_avc; ///< this flag is != 0 if codec is avc1
569 int nal_length_size; ///< Number of bytes used for nal length (1, 2 or 4)
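    /* In the AVC ("avc1") packaging, each NAL unit is preceded by a big-endian
     * length of nal_length_size bytes instead of an Annex B start code. A
     * minimal sketch of reading one prefix, assuming buf and i are a packet
     * buffer and offset:
     *
     *     int nalsize = 0;
     *     for (int n = 0; n < h->nal_length_size; n++)
     *         nalsize = (nalsize << 8) | buf[i++];
     */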
570
571 int bit_depth_luma; ///< luma bit depth from sps to detect changes
572 int chroma_format_idc; ///< chroma format from sps to detect changes
573
574 SPS *sps_buffers[MAX_SPS_COUNT];
575 PPS *pps_buffers[MAX_PPS_COUNT];
576
577 int dequant_coeff_pps; ///< reinit tables when pps changes
578
579 uint16_t *slice_table_base;
580
581 // POC stuff
582 int poc_lsb;
583 int poc_msb;
584 int delta_poc_bottom;
585 int delta_poc[2];
586 int frame_num;
587 int prev_poc_msb; ///< poc_msb of the last reference pic for POC type 0
588 int prev_poc_lsb; ///< poc_lsb of the last reference pic for POC type 0
589 int frame_num_offset; ///< for POC type 2
590 int prev_frame_num_offset; ///< for POC type 2
591 int prev_frame_num; ///< frame_num of the last pic for POC type 1/2
592
593 /**
594 * frame_num for frames or 2 * frame_num + 1 for field pics.
595 */
596 int curr_pic_num;
597
598 /**
599 * max_frame_num or 2 * max_frame_num for field pics.
600 */
601 int max_pic_num;
602
603 H264Picture *short_ref[32];
604 H264Picture *long_ref[32];
605 H264Picture *delayed_pic[MAX_DELAYED_PIC_COUNT + 2]; // FIXME size?
606 int last_pocs[MAX_DELAYED_PIC_COUNT];
607 H264Picture *next_output_pic;
608 int next_outputed_poc;
609
610 /**
611 * memory management control operations buffer.
612 */
613 MMCO mmco[MAX_MMCO_COUNT];
614 int mmco_index;
615 int mmco_reset;
616
617 int long_ref_count; ///< number of actual long term references
618 int short_ref_count; ///< number of actual short term references
619
620 /**
621 * @name Members for slice based multithreading
622 * @{
623 */
624 /**
625 * current slice number, used to initialize slice_num of each thread/context
626 */
627 int current_slice;
628
629 /**
630 * Max number of threads / contexts.
631 * This is equal to AVCodecContext.thread_count unless
632 * multithreaded decoding is impossible, in which case it is
633 * reduced to 1.
634 */
635 int max_contexts;
636
637 int slice_context_count;
638
639 /**
640 * 1 if the single thread fallback warning has already been
641 * displayed, 0 otherwise.
642 */
643 int single_decode_warning;
644
645 enum AVPictureType pict_type;
646
647 /** @} */
648
649 /**
650 * pic_struct in picture timing SEI message
651 */
652 SEI_PicStructType sei_pic_struct;
653
654 /**
655 * Complements sei_pic_struct.
656 * SEI_PIC_STRUCT_TOP_BOTTOM and SEI_PIC_STRUCT_BOTTOM_TOP indicate interlaced frames.
657 * However, soft-telecined frames may also carry these values.
658 * This is used in an attempt to flag soft-telecined content as progressive.
659 */
660 int prev_interlaced_frame;
661
662 /**
663 * frame_packing_arrangement SEI message
664 */
665 int sei_frame_packing_present;
666 int frame_packing_arrangement_type;
667 int content_interpretation_type;
668 int quincunx_subsampling;
669
670 /**
671 * display orientation SEI message
672 */
673 int sei_display_orientation_present;
674 int sei_anticlockwise_rotation;
675 int sei_hflip, sei_vflip;
676
677 /**
678 * User data registered by Rec. ITU-T T.35 SEI
679 */
680 int sei_reguserdata_afd_present;
681 uint8_t active_format_description;
682 int a53_caption_size;
683 uint8_t *a53_caption;
684
685 /**
686 * Bit set of clock types for fields/frames in picture timing SEI message.
687 * For each ct_type found, the appropriate bit is set (e.g., bit 1 for
688 * interlaced).
689 */
690 int sei_ct_type;
691
692 /**
693 * dpb_output_delay in picture timing SEI message, see H.264 C.2.2
694 */
695 int sei_dpb_output_delay;
696
697 /**
698 * cpb_removal_delay in picture timing SEI message, see H.264 C.1.2
699 */
700 int sei_cpb_removal_delay;
701
702 /**
703 * recovery_frame_cnt from SEI message
704 *
705 * Set to -1 if no recovery point SEI message was found, or to the number of
706 * frames before playback synchronizes. Frames containing a recovery point are
707 * key frames.
708 */
709 int sei_recovery_frame_cnt;
710
711 /**
712 * recovery_frame is the frame_num at which the next frame should
713 * be fully constructed.
714 *
715 * Set to -1 when not expecting a recovery point.
716 */
717 int recovery_frame;
718
719 /**
720 * We have seen an IDR, so all the following frames in coded order are correctly
721 * decodable.
722 */
723 #define FRAME_RECOVERED_IDR (1 << 0)
724 /**
725 * A sufficient number of frames have been decoded since an SEI recovery point,
726 * so all the following frames in presentation order are correct.
727 */
728 #define FRAME_RECOVERED_SEI (1 << 1)
729
730 int frame_recovered; ///< Initial frame has been completely recovered
731
732 /* for frame threading, this is set to 1
733 * after finish_setup() has been called, so we cannot modify
734 * some context properties (which are supposed to stay constant between
735 * slices) anymore */
736 int setup_finished;
737
738 // Timestamp stuff
739 int sei_buffering_period_present; ///< Buffering period SEI flag
740 int initial_cpb_removal_delay[32]; ///< Initial timestamps for CPBs
741
742 int enable_er;
743
744 AVBufferPool *qscale_table_pool;
745 AVBufferPool *mb_type_pool;
746 AVBufferPool *motion_val_pool;
747 AVBufferPool *ref_index_pool;
748
749 /* Motion Estimation */
750 qpel_mc_func (*qpel_put)[16];
751 qpel_mc_func (*qpel_avg)[16];
752 } H264Context;
753
754 extern const uint8_t ff_h264_chroma_qp[3][QP_MAX_NUM + 1]; ///< One chroma qp table for each supported bit depth (8, 9, 10).
755 extern const uint16_t ff_h264_mb_sizes[4];
756
757 /**
758 * Decode SEI
759 */
760 int ff_h264_decode_sei(H264Context *h);
761
762 /**
763 * Decode SPS
764 */
765 int ff_h264_decode_seq_parameter_set(H264Context *h);
766
767 /**
768 * Compute the profile from the SPS.
769 */
770 int ff_h264_get_profile(SPS *sps);
771
772 /**
773 * Decode PPS
774 */
775 int ff_h264_decode_picture_parameter_set(H264Context *h, int bit_length);
776
777 /**
778 * Decode a network abstraction layer unit.
779 * @param consumed is the number of bytes used as input
780 * @param length is the length of the array
781 * @param dst_length is the number of decoded bytes FIXME here
782 * or a decoded RBSP trailing?
783 * @return decoded bytes, might be src+1 if no escapes
784 */
785 const uint8_t *ff_h264_decode_nal(H264Context *h, H264SliceContext *sl, const uint8_t *src,
786 int *dst_length, int *consumed, int length);
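/* ff_h264_decode_nal unescapes the RBSP: every emulation_prevention_three_byte
 * is dropped, so the escaped byte sequence 00 00 03 01 in the input becomes
 * 00 00 01 in the returned buffer, and *dst_length reflects the shorter size. */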
787
788 /**
789 * Free any data that may have been allocated in the H264 context
790 * like SPS, PPS etc.
791 */
792 void ff_h264_free_context(H264Context *h);
793
794 /**
795 * Reconstruct bitstream slice_type.
796 */
797 int ff_h264_get_slice_type(const H264SliceContext *sl);
798
799 /**
800 * Allocate tables.
801 * needs width/height
802 */
803 int ff_h264_alloc_tables(H264Context *h);
804
805 int ff_h264_decode_ref_pic_list_reordering(H264Context *h, H264SliceContext *sl);
806 void ff_h264_fill_mbaff_ref_list(H264Context *h, H264SliceContext *sl);
807 void ff_h264_remove_all_refs(H264Context *h);
808
809 /**
810 * Execute the reference picture marking (memory management control operations).
811 */
812 int ff_h264_execute_ref_pic_marking(H264Context *h, MMCO *mmco, int mmco_count);
813
814 int ff_h264_decode_ref_pic_marking(H264Context *h, GetBitContext *gb,
815 int first_slice);
816
817 int ff_generate_sliding_window_mmcos(H264Context *h, int first_slice);
818
819 /**
820 * Check whether the top and left blocks are available (if needed) and change
821 * the DC mode so that it only uses the available blocks.
822 */
823 int ff_h264_check_intra4x4_pred_mode(const H264Context *h, H264SliceContext *sl);
824
825 /**
826 * Check whether the top and left blocks are available (if needed) and change
827 * the DC mode so that it only uses the available blocks.
828 */
829 int ff_h264_check_intra_pred_mode(const H264Context *h, H264SliceContext *sl,
830 int mode, int is_chroma);
831
832 void ff_h264_hl_decode_mb(const H264Context *h, H264SliceContext *sl);
833 int ff_h264_decode_extradata(H264Context *h);
834 int ff_h264_decode_init(AVCodecContext *avctx);
835 void ff_h264_decode_init_vlc(void);
836
837 /**
838 * Decode a CAVLC-coded macroblock
839 * @return 0 if OK, ER_AC_ERROR / ER_DC_ERROR / ER_MV_ERROR on error
840 */
841 int ff_h264_decode_mb_cavlc(const H264Context *h, H264SliceContext *sl);
842
843 /**
844 * Decode a CABAC coded macroblock
845 * @return 0 if OK, ER_AC_ERROR / ER_DC_ERROR / ER_MV_ERROR on error
846 */
847 int ff_h264_decode_mb_cabac(const H264Context *h, H264SliceContext *sl);
848
849 void ff_h264_init_cabac_states(const H264Context *h, H264SliceContext *sl);
850
851 void h264_init_dequant_tables(H264Context *h);
852
853 void ff_h264_direct_dist_scale_factor(const H264Context *const h, H264SliceContext *sl);
854 void ff_h264_direct_ref_list_init(const H264Context *const h, H264SliceContext *sl);
855 void ff_h264_pred_direct_motion(const H264Context *const h, H264SliceContext *sl,
856 int *mb_type);
857
858 void ff_h264_filter_mb_fast(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y,
859 uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr,
860 unsigned int linesize, unsigned int uvlinesize);
861 void ff_h264_filter_mb(const H264Context *h, H264SliceContext *sl, int mb_x, int mb_y,
862 uint8_t *img_y, uint8_t *img_cb, uint8_t *img_cr,
863 unsigned int linesize, unsigned int uvlinesize);
864
865 /**
866 * Reset SEI values at the beginning of the frame.
867 *
868 * @param h H.264 context.
869 */
870 void ff_h264_reset_sei(H264Context *h);
871
872 /*
873 * o-o o-o
874 * / / /
875 * o-o o-o
876 * ,---'
877 * o-o o-o
878 * / / /
879 * o-o o-o
880 */
881
882 /* Scan8 organization:
883 * 0 1 2 3 4 5 6 7
884 * 0 DY y y y y y
885 * 1 y Y Y Y Y
886 * 2 y Y Y Y Y
887 * 3 y Y Y Y Y
888 * 4 y Y Y Y Y
889 * 5 DU u u u u u
890 * 6 u U U U U
891 * 7 u U U U U
892 * 8 u U U U U
893 * 9 u U U U U
894 * 10 DV v v v v v
895 * 11 v V V V V
896 * 12 v V V V V
897 * 13 v V V V V
898 * 14 v V V V V
899 * DY/DU/DV are for luma/chroma DC.
900 */
901
902 #define LUMA_DC_BLOCK_INDEX 48
903 #define CHROMA_DC_BLOCK_INDEX 49
904
905 // This table must be here because scan8[constant] must be known at compile time
906 static const uint8_t scan8[16 * 3 + 3] = {
907 4 + 1 * 8, 5 + 1 * 8, 4 + 2 * 8, 5 + 2 * 8,
908 6 + 1 * 8, 7 + 1 * 8, 6 + 2 * 8, 7 + 2 * 8,
909 4 + 3 * 8, 5 + 3 * 8, 4 + 4 * 8, 5 + 4 * 8,
910 6 + 3 * 8, 7 + 3 * 8, 6 + 4 * 8, 7 + 4 * 8,
911 4 + 6 * 8, 5 + 6 * 8, 4 + 7 * 8, 5 + 7 * 8,
912 6 + 6 * 8, 7 + 6 * 8, 6 + 7 * 8, 7 + 7 * 8,
913 4 + 8 * 8, 5 + 8 * 8, 4 + 9 * 8, 5 + 9 * 8,
914 6 + 8 * 8, 7 + 8 * 8, 6 + 9 * 8, 7 + 9 * 8,
915 4 + 11 * 8, 5 + 11 * 8, 4 + 12 * 8, 5 + 12 * 8,
916 6 + 11 * 8, 7 + 11 * 8, 6 + 12 * 8, 7 + 12 * 8,
917 4 + 13 * 8, 5 + 13 * 8, 4 + 14 * 8, 5 + 14 * 8,
918 6 + 13 * 8, 7 + 13 * 8, 6 + 14 * 8, 7 + 14 * 8,
919 0 + 0 * 8, 0 + 5 * 8, 0 + 10 * 8
920 };
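/* The per-MB caches indexed through scan8[] are laid out 8 entries per row
 * (see the diagram above), so for 4x4 block n the left neighbour sits at
 * scan8[n] - 1 and the top neighbour at scan8[n] - 8. Typical use:
 *
 *     uint8_t nnz = sl->non_zero_count_cache[scan8[n]];
 *     int     ref = sl->ref_cache[list][scan8[n]];
 */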
921
922 static av_always_inline uint32_t pack16to32(int a, int b)
923 {
924 #if HAVE_BIGENDIAN
925 return (b & 0xFFFF) + (a << 16);
926 #else
927 return (a & 0xFFFF) + (b << 16);
928 #endif
929 }
930
931 static av_always_inline uint16_t pack8to16(int a, int b)
932 {
933 #if HAVE_BIGENDIAN
934 return (b & 0xFF) + (a << 8);
935 #else
936 return (a & 0xFF) + (b << 8);
937 #endif
938 }
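/* pack16to32() orders the two halves so that a single aligned 32-bit store
 * leaves a in the first int16_t and b in the second on either endianness,
 * e.g. writing one motion vector (mx, my are illustrative names) into the
 * cache in one go:
 *
 *     AV_WN32A(sl->mv_cache[list][scan8[n]], pack16to32(mx, my));
 *     // afterwards: mv_cache[list][scan8[n]][0] == mx, [1] == my
 */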
939
940 /**
941 * Get the chroma qp.
942 */
943 static av_always_inline int get_chroma_qp(const H264Context *h, int t, int qscale)
944 {
945 return h->pps.chroma_qp_table[t][qscale];
946 }
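/* Typically called once per chroma plane when the QP changes, e.g.:
 *
 *     sl->chroma_qp[0] = get_chroma_qp(h, 0, sl->qscale);
 *     sl->chroma_qp[1] = get_chroma_qp(h, 1, sl->qscale);
 */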
947
948 /**
949 * Get the predicted intra4x4 prediction mode.
950 */
951 static av_always_inline int pred_intra_mode(const H264Context *h,
952 H264SliceContext *sl, int n)
953 {
954 const int index8 = scan8[n];
955 const int left = sl->intra4x4_pred_mode_cache[index8 - 1];
956 const int top = sl->intra4x4_pred_mode_cache[index8 - 8];
957 const int min = FFMIN(left, top);
958
959 ff_tlog(h->avctx, "mode:%d %d min:%d\n", left, top, min);
960
961 if (min < 0)
962 return DC_PRED;
963 else
964 return min;
965 }
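/* Worked example: if the top neighbour is unavailable, its cache entry is
 * negative, so min < 0 and the prediction falls back to DC_PRED; otherwise the
 * predicted mode is simply the smaller of the left and top modes, as the spec
 * requires. */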
966
967 static av_always_inline void write_back_intra_pred_mode(const H264Context *h,
968 H264SliceContext *sl)
969 {
970 int8_t *i4x4 = sl->intra4x4_pred_mode + h->mb2br_xy[sl->mb_xy];
971 int8_t *i4x4_cache = sl->intra4x4_pred_mode_cache;
972
973 AV_COPY32(i4x4, i4x4_cache + 4 + 8 * 4);
974 i4x4[4] = i4x4_cache[7 + 8 * 3];
975 i4x4[5] = i4x4_cache[7 + 8 * 2];
976 i4x4[6] = i4x4_cache[7 + 8 * 1];
977 }
978
979 static av_always_inline void write_back_non_zero_count(const H264Context *h,
980 H264SliceContext *sl)
981 {
982 const int mb_xy = sl->mb_xy;
983 uint8_t *nnz = h->non_zero_count[mb_xy];
984 uint8_t *nnz_cache = sl->non_zero_count_cache;
985
986 AV_COPY32(&nnz[ 0], &nnz_cache[4 + 8 * 1]);
987 AV_COPY32(&nnz[ 4], &nnz_cache[4 + 8 * 2]);
988 AV_COPY32(&nnz[ 8], &nnz_cache[4 + 8 * 3]);
989 AV_COPY32(&nnz[12], &nnz_cache[4 + 8 * 4]);
990 AV_COPY32(&nnz[16], &nnz_cache[4 + 8 * 6]);
991 AV_COPY32(&nnz[20], &nnz_cache[4 + 8 * 7]);
992 AV_COPY32(&nnz[32], &nnz_cache[4 + 8 * 11]);
993 AV_COPY32(&nnz[36], &nnz_cache[4 + 8 * 12]);
994
995 if (!h->chroma_y_shift) {
996 AV_COPY32(&nnz[24], &nnz_cache[4 + 8 * 8]);
997 AV_COPY32(&nnz[28], &nnz_cache[4 + 8 * 9]);
998 AV_COPY32(&nnz[40], &nnz_cache[4 + 8 * 13]);
999 AV_COPY32(&nnz[44], &nnz_cache[4 + 8 * 14]);
1000 }
1001 }
1002
1003 static av_always_inline void write_back_motion_list(const H264Context *h,
1004 H264SliceContext *sl,
1005 int b_stride,
1006 int b_xy, int b8_xy,
1007 int mb_type, int list)
1008 {
1009 int16_t(*mv_dst)[2] = &h->cur_pic.motion_val[list][b_xy];
1010 int16_t(*mv_src)[2] = &sl->mv_cache[list][scan8[0]];
1011 AV_COPY128(mv_dst + 0 * b_stride, mv_src + 8 * 0);
1012 AV_COPY128(mv_dst + 1 * b_stride, mv_src + 8 * 1);
1013 AV_COPY128(mv_dst + 2 * b_stride, mv_src + 8 * 2);
1014 AV_COPY128(mv_dst + 3 * b_stride, mv_src + 8 * 3);
1015 if (CABAC(h)) {
1016 uint8_t (*mvd_dst)[2] = &sl->mvd_table[list][FMO ? 8 * sl->mb_xy
1017 : h->mb2br_xy[sl->mb_xy]];
1018 uint8_t(*mvd_src)[2] = &sl->mvd_cache[list][scan8[0]];
1019 if (IS_SKIP(mb_type)) {
1020 AV_ZERO128(mvd_dst);
1021 } else {
1022 AV_COPY64(mvd_dst, mvd_src + 8 * 3);
1023 AV_COPY16(mvd_dst + 3 + 3, mvd_src + 3 + 8 * 0);
1024 AV_COPY16(mvd_dst + 3 + 2, mvd_src + 3 + 8 * 1);
1025 AV_COPY16(mvd_dst + 3 + 1, mvd_src + 3 + 8 * 2);
1026 }
1027 }
1028
1029 {
1030 int8_t *ref_index = &h->cur_pic.ref_index[list][b8_xy];
1031 int8_t *ref_cache = sl->ref_cache[list];
1032 ref_index[0 + 0 * 2] = ref_cache[scan8[0]];
1033 ref_index[1 + 0 * 2] = ref_cache[scan8[4]];
1034 ref_index[0 + 1 * 2] = ref_cache[scan8[8]];
1035 ref_index[1 + 1 * 2] = ref_cache[scan8[12]];
1036 }
1037 }
1038
1039 static av_always_inline void write_back_motion(const H264Context *h,
1040 H264SliceContext *sl,
1041 int mb_type)
1042 {
1043 const int b_stride = h->b_stride;
1044 const int b_xy = 4 * sl->mb_x + 4 * sl->mb_y * h->b_stride; // try mb2b(8)_xy
1045 const int b8_xy = 4 * sl->mb_xy;
1046
1047 if (USES_LIST(mb_type, 0)) {
1048 write_back_motion_list(h, sl, b_stride, b_xy, b8_xy, mb_type, 0);
1049 } else {
1050 fill_rectangle(&h->cur_pic.ref_index[0][b8_xy],
1051 2, 2, 2, (uint8_t)LIST_NOT_USED, 1);
1052 }
1053 if (USES_LIST(mb_type, 1))
1054 write_back_motion_list(h, sl, b_stride, b_xy, b8_xy, mb_type, 1);
1055
1056 if (sl->slice_type_nos == AV_PICTURE_TYPE_B && CABAC(h)) {
1057 if (IS_8X8(mb_type)) {
1058 uint8_t *direct_table = &h->direct_table[4 * sl->mb_xy];
1059 direct_table[1] = sl->sub_mb_type[1] >> 1;
1060 direct_table[2] = sl->sub_mb_type[2] >> 1;
1061 direct_table[3] = sl->sub_mb_type[3] >> 1;
1062 }
1063 }
1064 }
1065
1066 static av_always_inline int get_dct8x8_allowed(const H264Context *h, H264SliceContext *sl)
1067 {
1068 if (h->sps.direct_8x8_inference_flag)
1069 return !(AV_RN64A(sl->sub_mb_type) &
1070 ((MB_TYPE_16x8 | MB_TYPE_8x16 | MB_TYPE_8x8) *
1071 0x0001000100010001ULL));
1072 else
1073 return !(AV_RN64A(sl->sub_mb_type) &
1074 ((MB_TYPE_16x8 | MB_TYPE_8x16 | MB_TYPE_8x8 | MB_TYPE_DIRECT2) *
1075 0x0001000100010001ULL));
1076 }
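/* Multiplying the partition-type mask by 0x0001000100010001ULL replicates it
 * into each 16-bit lane of sub_mb_type[4], so a single 64-bit AND checks all
 * four 8x8 sub-blocks at once: the 8x8 DCT is allowed only if none of them is
 * split further (and, without direct_8x8_inference, none is a direct block). */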
1077
1078 int ff_h264_field_end(H264Context *h, H264SliceContext *sl, int in_setup);
1079
1080 int ff_h264_ref_picture(H264Context *h, H264Picture *dst, H264Picture *src);
1081 void ff_h264_unref_picture(H264Context *h, H264Picture *pic);
1082
1083 int ff_h264_slice_context_init(H264Context *h, H264SliceContext *sl);
1084
1085 void ff_h264_draw_horiz_band(const H264Context *h, H264SliceContext *sl, int y, int height);
1086 int ff_init_poc(H264Context *h, int pic_field_poc[2], int *pic_poc);
1087 int ff_pred_weight_table(H264Context *h, H264SliceContext *sl);
1088 int ff_set_ref_count(H264Context *h, H264SliceContext *sl);
1089
1090 int ff_h264_decode_slice_header(H264Context *h, H264SliceContext *sl);
1091 int ff_h264_execute_decode_slices(H264Context *h, unsigned context_count);
1092 int ff_h264_update_thread_context(AVCodecContext *dst,
1093 const AVCodecContext *src);
1094
1095 void ff_h264_flush_change(H264Context *h);
1096
1097 void ff_h264_free_tables(H264Context *h);
1098
1099 #endif /* AVCODEC_H264_H */