/*
 * The simplest mpeg encoder (well, it was the simplest!)
 * Copyright (c) 2000,2001 Fabrice Bellard
 * Copyright (c) 2002-2004 Michael Niedermayer <michaelni@gmx.at>
 *
 * 4MV & hq & B-frame encoding stuff by Michael Niedermayer <michaelni@gmx.at>
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * The simplest mpeg encoder (well, it was the simplest!).
 */

#include "libavutil/imgutils.h"
#include "avcodec.h"
#include "dsputil.h"
#include "internal.h"
#include "mathops.h"
#include "mpegvideo.h"
#include "mjpegenc.h"
#include "msmpeg4.h"
#include "xvmc_internal.h"
#include "thread.h"
#include <limits.h>

//#undef NDEBUG
//#include <assert.h>

static void dct_unquantize_mpeg1_intra_c(MpegEncContext *s,
                                         int16_t *block, int n, int qscale);
static void dct_unquantize_mpeg1_inter_c(MpegEncContext *s,
                                         int16_t *block, int n, int qscale);
static void dct_unquantize_mpeg2_intra_c(MpegEncContext *s,
                                         int16_t *block, int n, int qscale);
static void dct_unquantize_mpeg2_intra_bitexact(MpegEncContext *s,
                                                int16_t *block, int n, int qscale);
static void dct_unquantize_mpeg2_inter_c(MpegEncContext *s,
                                         int16_t *block, int n, int qscale);
static void dct_unquantize_h263_intra_c(MpegEncContext *s,
                                        int16_t *block, int n, int qscale);
static void dct_unquantize_h263_inter_c(MpegEncContext *s,
                                        int16_t *block, int n, int qscale);


/* enable all paranoid tests for rounding, overflows, etc... */
//#define PARANOID

//#define DEBUG


static const uint8_t ff_default_chroma_qscale_table[32] = {
//   0   1   2   3   4   5   6   7   8   9  10  11  12  13  14  15
     0,  1,  2,  3,  4,  5,  6,  7,  8,  9, 10, 11, 12, 13, 14, 15,
    16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31
};

const uint8_t ff_mpeg1_dc_scale_table[128] = {
//  0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
    8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8,
};

static const uint8_t mpeg2_dc_scale_table1[128] = {
//  0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
    4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4,
};

static const uint8_t mpeg2_dc_scale_table2[128] = {
//  0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
    2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
};

static const uint8_t mpeg2_dc_scale_table3[128] = {
//  0  1  2  3  4  5  6  7  8  9 10 11 12 13 14 15
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
};

const uint8_t *const ff_mpeg2_dc_scale_table[4] = {
    ff_mpeg1_dc_scale_table,
    mpeg2_dc_scale_table1,
    mpeg2_dc_scale_table2,
    mpeg2_dc_scale_table3,
};

const enum AVPixelFormat ff_pixfmt_list_420[] = {
    AV_PIX_FMT_YUV420P,
    AV_PIX_FMT_NONE
};

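/* Error-resilience callback (installed in init_er()): reconstructs a single
 * macroblock from the motion vectors chosen by the concealment code and
 * hands it to ff_MPV_decode_mb(). */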
static void mpeg_er_decode_mb(void *opaque, int ref, int mv_dir, int mv_type,
                              int (*mv)[2][4][2],
                              int mb_x, int mb_y, int mb_intra, int mb_skipped)
{
    MpegEncContext *s = opaque;

    s->mv_dir     = mv_dir;
    s->mv_type    = mv_type;
    s->mb_intra   = mb_intra;
    s->mb_skipped = mb_skipped;
    s->mb_x       = mb_x;
    s->mb_y       = mb_y;
    memcpy(s->mv, mv, sizeof(*mv));

    ff_init_block_index(s);
    ff_update_block_index(s);

    s->dsp.clear_blocks(s->block[0]);

    s->dest[0] = s->current_picture.f.data[0] + (s->mb_y * 16 * s->linesize) + s->mb_x * 16;
    s->dest[1] = s->current_picture.f.data[1] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);
    s->dest[2] = s->current_picture.f.data[2] + (s->mb_y * (16 >> s->chroma_y_shift) * s->uvlinesize) + s->mb_x * (16 >> s->chroma_x_shift);

    assert(ref == 0);
    ff_MPV_decode_mb(s, s->block);
}

const uint8_t *avpriv_mpv_find_start_code(const uint8_t *restrict p,
                                          const uint8_t *end,
                                          uint32_t *restrict state)
{
    int i;

    assert(p <= end);
    if (p >= end)
        return end;

    for (i = 0; i < 3; i++) {
        uint32_t tmp = *state << 8;
        *state = tmp + *(p++);
        if (tmp == 0x100 || p == end)
            return p;
    }

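    /* Scan up to three bytes per iteration: a 00 00 01 start-code prefix
     * cannot end at the current position if the byte just read is > 1, if
     * the byte before it is non-zero, and so on. */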
    while (p < end) {
        if      (p[-1] > 1      ) p += 3;
        else if (p[-2]          ) p += 2;
        else if (p[-3]|(p[-1]-1)) p++;
        else {
            p++;
            break;
        }
    }

    p = FFMIN(p, end) - 4;
    *state = AV_RB32(p);

    return p + 4;
}

/* init common dct for both encoder and decoder */
av_cold int ff_dct_common_init(MpegEncContext *s)
{
    ff_dsputil_init(&s->dsp, s->avctx);
    ff_videodsp_init(&s->vdsp, s->avctx->bits_per_raw_sample);

    s->dct_unquantize_h263_intra  = dct_unquantize_h263_intra_c;
    s->dct_unquantize_h263_inter  = dct_unquantize_h263_inter_c;
    s->dct_unquantize_mpeg1_intra = dct_unquantize_mpeg1_intra_c;
    s->dct_unquantize_mpeg1_inter = dct_unquantize_mpeg1_inter_c;
    s->dct_unquantize_mpeg2_intra = dct_unquantize_mpeg2_intra_c;
    if (s->flags & CODEC_FLAG_BITEXACT)
        s->dct_unquantize_mpeg2_intra = dct_unquantize_mpeg2_intra_bitexact;
    s->dct_unquantize_mpeg2_inter = dct_unquantize_mpeg2_inter_c;

#if ARCH_X86
    ff_MPV_common_init_x86(s);
#elif ARCH_ALPHA
    ff_MPV_common_init_axp(s);
#elif ARCH_ARM
    ff_MPV_common_init_arm(s);
#elif HAVE_ALTIVEC
    ff_MPV_common_init_altivec(s);
#elif ARCH_BFIN
    ff_MPV_common_init_bfin(s);
#endif

    /* load & permutate scantables
     * note: only wmv uses different ones
     */
    if (s->alternate_scan) {
        ff_init_scantable(s->dsp.idct_permutation, &s->inter_scantable, ff_alternate_vertical_scan);
        ff_init_scantable(s->dsp.idct_permutation, &s->intra_scantable, ff_alternate_vertical_scan);
    } else {
        ff_init_scantable(s->dsp.idct_permutation, &s->inter_scantable, ff_zigzag_direct);
        ff_init_scantable(s->dsp.idct_permutation, &s->intra_scantable, ff_zigzag_direct);
    }
    ff_init_scantable(s->dsp.idct_permutation, &s->intra_h_scantable, ff_alternate_horizontal_scan);
    ff_init_scantable(s->dsp.idct_permutation, &s->intra_v_scantable, ff_alternate_vertical_scan);

    return 0;
}

void ff_copy_picture(Picture *dst, Picture *src)
{
    *dst = *src;
    dst->f.type = FF_BUFFER_TYPE_COPY;
}

/**
 * Release a frame buffer
 */
static void free_frame_buffer(MpegEncContext *s, Picture *pic)
{
    /* WM Image / Screen codecs allocate internal buffers with different
     * dimensions / colorspaces; ignore user-defined callbacks for these. */
    if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
        s->codec_id != AV_CODEC_ID_VC1IMAGE  &&
        s->codec_id != AV_CODEC_ID_MSS2)
        ff_thread_release_buffer(s->avctx, &pic->f);
    else
        avcodec_default_release_buffer(s->avctx, &pic->f);
    av_freep(&pic->f.hwaccel_picture_private);
}

int ff_mpv_frame_size_alloc(MpegEncContext *s, int linesize)
{
    int alloc_size = FFALIGN(FFABS(linesize) + 32, 32);

    // edge emu needs blocksize + filter length - 1
    // (= 17x17 for halfpel / 21x21 for h264)
    // VC1 computes luma and chroma simultaneously and needs 19X19 + 9x9
    // at uvlinesize. It supports only YUV420 so 24x24 is enough
    // linesize * interlaced * MBsize
    FF_ALLOCZ_OR_GOTO(s->avctx, s->edge_emu_buffer, alloc_size * 2 * 24,
                      fail);

    FF_ALLOCZ_OR_GOTO(s->avctx, s->me.scratchpad, alloc_size * 2 * 16 * 2,
                      fail)
    s->me.temp         = s->me.scratchpad;
    s->rd_scratchpad   = s->me.scratchpad;
    s->b_scratchpad    = s->me.scratchpad;
    s->obmc_scratchpad = s->me.scratchpad + 16;

    return 0;
fail:
    av_freep(&s->edge_emu_buffer);
    return AVERROR(ENOMEM);
}

/**
 * Allocate a frame buffer
 */
static int alloc_frame_buffer(MpegEncContext *s, Picture *pic)
{
    int r, ret;

    if (s->avctx->hwaccel) {
        assert(!pic->f.hwaccel_picture_private);
        if (s->avctx->hwaccel->priv_data_size) {
            pic->f.hwaccel_picture_private = av_mallocz(s->avctx->hwaccel->priv_data_size);
            if (!pic->f.hwaccel_picture_private) {
                av_log(s->avctx, AV_LOG_ERROR, "alloc_frame_buffer() failed (hwaccel private data allocation)\n");
                return -1;
            }
        }
    }

    if (s->codec_id != AV_CODEC_ID_WMV3IMAGE &&
        s->codec_id != AV_CODEC_ID_VC1IMAGE  &&
        s->codec_id != AV_CODEC_ID_MSS2)
        r = ff_thread_get_buffer(s->avctx, &pic->f);
    else
        r = avcodec_default_get_buffer(s->avctx, &pic->f);

    if (r < 0 || !pic->f.type || !pic->f.data[0]) {
        av_log(s->avctx, AV_LOG_ERROR, "get_buffer() failed (%d %d %p)\n",
               r, pic->f.type, pic->f.data[0]);
        av_freep(&pic->f.hwaccel_picture_private);
        return -1;
    }

    if (s->linesize && (s->linesize   != pic->f.linesize[0] ||
                        s->uvlinesize != pic->f.linesize[1])) {
        av_log(s->avctx, AV_LOG_ERROR,
               "get_buffer() failed (stride changed)\n");
        free_frame_buffer(s, pic);
        return -1;
    }

    if (pic->f.linesize[1] != pic->f.linesize[2]) {
        av_log(s->avctx, AV_LOG_ERROR,
               "get_buffer() failed (uv stride mismatch)\n");
        free_frame_buffer(s, pic);
        return -1;
    }

    if (!s->edge_emu_buffer &&
        (ret = ff_mpv_frame_size_alloc(s, pic->f.linesize[0])) < 0) {
        av_log(s->avctx, AV_LOG_ERROR,
               "get_buffer() failed to allocate context scratch buffers.\n");
        free_frame_buffer(s, pic);
        return ret;
    }

    return 0;
}

/**
 * Allocate a Picture.
 * The pixels are allocated/set by calling get_buffer() if shared = 0
 */
int ff_alloc_picture(MpegEncContext *s, Picture *pic, int shared)
{
    const int big_mb_num = s->mb_stride * (s->mb_height + 1) + 1;

    // the + 1 is needed so memset(,,stride*height) does not sig11

    const int mb_array_size = s->mb_stride * s->mb_height;
    const int b8_array_size = s->b8_stride * s->mb_height * 2;
    const int b4_array_size = s->b4_stride * s->mb_height * 4;
    int i;
    int r = -1;

    if (shared) {
        assert(pic->f.data[0]);
        assert(pic->f.type == 0 || pic->f.type == FF_BUFFER_TYPE_SHARED);
        pic->f.type = FF_BUFFER_TYPE_SHARED;
    } else {
        assert(!pic->f.data[0]);

        if (alloc_frame_buffer(s, pic) < 0)
            return -1;

        s->linesize   = pic->f.linesize[0];
        s->uvlinesize = pic->f.linesize[1];
    }

    if (pic->f.qscale_table == NULL) {
        if (s->encoding) {
            FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_var,
                              mb_array_size * sizeof(int16_t), fail)
            FF_ALLOCZ_OR_GOTO(s->avctx, pic->mc_mb_var,
                              mb_array_size * sizeof(int16_t), fail)
            FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_mean,
                              mb_array_size * sizeof(int8_t), fail)
        }

        FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.mbskip_table,
                          mb_array_size * sizeof(uint8_t) + 2, fail) // the + 2 is for the slice end check
        FF_ALLOCZ_OR_GOTO(s->avctx, pic->qscale_table_base,
                          (big_mb_num + s->mb_stride) * sizeof(uint8_t),
                          fail)
        FF_ALLOCZ_OR_GOTO(s->avctx, pic->mb_type_base,
                          (big_mb_num + s->mb_stride) * sizeof(uint32_t),
                          fail)
        pic->f.mb_type      = pic->mb_type_base      + 2 * s->mb_stride + 1;
        pic->f.qscale_table = pic->qscale_table_base + 2 * s->mb_stride + 1;
        if (s->out_format == FMT_H264) {
            for (i = 0; i < 2; i++) {
                FF_ALLOCZ_OR_GOTO(s->avctx, pic->motion_val_base[i],
                                  2 * (b4_array_size + 4) * sizeof(int16_t),
                                  fail)
                pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
                FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
                                  4 * mb_array_size * sizeof(uint8_t), fail)
            }
            pic->f.motion_subsample_log2 = 2;
        } else if (s->out_format == FMT_H263 || s->encoding ||
                   (s->avctx->debug & FF_DEBUG_MV) || s->avctx->debug_mv) {
            for (i = 0; i < 2; i++) {
                FF_ALLOCZ_OR_GOTO(s->avctx, pic->motion_val_base[i],
                                  2 * (b8_array_size + 4) * sizeof(int16_t),
                                  fail)
                pic->f.motion_val[i] = pic->motion_val_base[i] + 4;
                FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.ref_index[i],
                                  4 * mb_array_size * sizeof(uint8_t), fail)
            }
            pic->f.motion_subsample_log2 = 3;
        }
        if (s->avctx->debug & FF_DEBUG_DCT_COEFF) {
            FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.dct_coeff,
                              64 * mb_array_size * sizeof(int16_t) * 6, fail)
        }
        pic->f.qstride = s->mb_stride;
        FF_ALLOCZ_OR_GOTO(s->avctx, pic->f.pan_scan,
                          1 * sizeof(AVPanScan), fail)
    }

    pic->owner2 = s;

    return 0;
fail: // for the FF_ALLOCZ_OR_GOTO macro
    if (r >= 0)
        free_frame_buffer(s, pic);
    return -1;
}

/**
 * Deallocate a picture.
 */
static void free_picture(MpegEncContext *s, Picture *pic)
{
    int i;

    if (pic->f.data[0] && pic->f.type != FF_BUFFER_TYPE_SHARED) {
        free_frame_buffer(s, pic);
    }

    av_freep(&pic->mb_var);
    av_freep(&pic->mc_mb_var);
    av_freep(&pic->mb_mean);
    av_freep(&pic->f.mbskip_table);
    av_freep(&pic->qscale_table_base);
    pic->f.qscale_table = NULL;
    av_freep(&pic->mb_type_base);
    pic->f.mb_type = NULL;
    av_freep(&pic->f.dct_coeff);
    av_freep(&pic->f.pan_scan);
    pic->f.mb_type = NULL;
    for (i = 0; i < 2; i++) {
        av_freep(&pic->motion_val_base[i]);
        av_freep(&pic->f.ref_index[i]);
        pic->f.motion_val[i] = NULL;
    }

    if (pic->f.type == FF_BUFFER_TYPE_SHARED) {
        for (i = 0; i < 4; i++) {
            pic->f.base[i] =
            pic->f.data[i] = NULL;
        }
        pic->f.type = 0;
    }
}

static int init_duplicate_context(MpegEncContext *s)
{
    int y_size = s->b8_stride * (2 * s->mb_height + 1);
    int c_size = s->mb_stride * (s->mb_height + 1);
    int yc_size = y_size + 2 * c_size;
    int i;

    s->edge_emu_buffer =
    s->me.scratchpad   =
    s->me.temp         =
    s->rd_scratchpad   =
    s->b_scratchpad    =
    s->obmc_scratchpad = NULL;

    if (s->encoding) {
        FF_ALLOCZ_OR_GOTO(s->avctx, s->me.map,
                          ME_MAP_SIZE * sizeof(uint32_t), fail)
        FF_ALLOCZ_OR_GOTO(s->avctx, s->me.score_map,
                          ME_MAP_SIZE * sizeof(uint32_t), fail)
        if (s->avctx->noise_reduction) {
            FF_ALLOCZ_OR_GOTO(s->avctx, s->dct_error_sum,
                              2 * 64 * sizeof(int), fail)
        }
    }
    FF_ALLOCZ_OR_GOTO(s->avctx, s->blocks, 64 * 12 * 2 * sizeof(int16_t), fail)
    s->block = s->blocks[0];

    for (i = 0; i < 12; i++) {
        s->pblocks[i] = &s->block[i];
    }

    if (s->out_format == FMT_H263) {
        /* ac values */
        FF_ALLOCZ_OR_GOTO(s->avctx, s->ac_val_base,
                          yc_size * sizeof(int16_t) * 16, fail);
        s->ac_val[0] = s->ac_val_base + s->b8_stride + 1;
        s->ac_val[1] = s->ac_val_base + y_size + s->mb_stride + 1;
        s->ac_val[2] = s->ac_val[1] + c_size;
    }

    return 0;
fail:
    return -1; // free() through ff_MPV_common_end()
}

static void free_duplicate_context(MpegEncContext *s)
{
    if (s == NULL)
        return;

    av_freep(&s->edge_emu_buffer);
    av_freep(&s->me.scratchpad);
    s->me.temp         =
    s->rd_scratchpad   =
    s->b_scratchpad    =
    s->obmc_scratchpad = NULL;

    av_freep(&s->dct_error_sum);
    av_freep(&s->me.map);
    av_freep(&s->me.score_map);
    av_freep(&s->blocks);
    av_freep(&s->ac_val_base);
    s->block = NULL;
}

static void backup_duplicate_context(MpegEncContext *bak, MpegEncContext *src)
{
#define COPY(a) bak->a = src->a
    COPY(edge_emu_buffer);
    COPY(me.scratchpad);
    COPY(me.temp);
    COPY(rd_scratchpad);
    COPY(b_scratchpad);
    COPY(obmc_scratchpad);
    COPY(me.map);
    COPY(me.score_map);
    COPY(blocks);
    COPY(block);
    COPY(start_mb_y);
    COPY(end_mb_y);
    COPY(me.map_generation);
    COPY(pb);
    COPY(dct_error_sum);
    COPY(dct_count[0]);
    COPY(dct_count[1]);
    COPY(ac_val_base);
    COPY(ac_val[0]);
    COPY(ac_val[1]);
    COPY(ac_val[2]);
#undef COPY
}

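/* Refresh a slice-thread context from the main context: copy everything from
 * src, then restore dst's own scratch buffers and per-slice state saved via
 * backup_duplicate_context(). */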
int ff_update_duplicate_context(MpegEncContext *dst, MpegEncContext *src)
{
    MpegEncContext bak;
    int i, ret;
    // FIXME copy only needed parts
    // START_TIMER
    backup_duplicate_context(&bak, dst);
    memcpy(dst, src, sizeof(MpegEncContext));
    backup_duplicate_context(dst, &bak);
    for (i = 0; i < 12; i++) {
        dst->pblocks[i] = &dst->block[i];
    }
    if (!dst->edge_emu_buffer &&
        (ret = ff_mpv_frame_size_alloc(dst, dst->linesize)) < 0) {
        av_log(dst->avctx, AV_LOG_ERROR, "failed to allocate context "
               "scratch buffers.\n");
        return ret;
    }
    // STOP_TIMER("update_duplicate_context")
    // about 10k cycles / 0.01 sec for 1000frames on 1ghz with 2 threads
    return 0;
}

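/* Frame-threading helper: copy the decoder state from the source thread's
 * context (src) into dst after src has finished its frame, so decoding can
 * continue from a consistent state in the next thread. */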
int ff_mpeg_update_thread_context(AVCodecContext *dst,
                                  const AVCodecContext *src)
{
    int i;
    MpegEncContext *s = dst->priv_data, *s1 = src->priv_data;

    if (dst == src || !s1->context_initialized)
        return 0;

    // FIXME can parameters change on I-frames?
    // in that case dst may need a reinit
    if (!s->context_initialized) {
        memcpy(s, s1, sizeof(MpegEncContext));

        s->avctx                 = dst;
        s->picture_range_start  += MAX_PICTURE_COUNT;
        s->picture_range_end    += MAX_PICTURE_COUNT;
        s->bitstream_buffer      = NULL;
        s->bitstream_buffer_size = s->allocated_bitstream_buffer_size = 0;

        ff_MPV_common_init(s);
    }

    if (s->height != s1->height || s->width != s1->width || s->context_reinit) {
        int err;
        s->context_reinit = 0;
        s->height = s1->height;
        s->width  = s1->width;
        if ((err = ff_MPV_common_frame_size_change(s)) < 0)
            return err;
    }

    s->avctx->coded_height  = s1->avctx->coded_height;
    s->avctx->coded_width   = s1->avctx->coded_width;
    s->avctx->width         = s1->avctx->width;
    s->avctx->height        = s1->avctx->height;

    s->coded_picture_number = s1->coded_picture_number;
    s->picture_number       = s1->picture_number;
    s->input_picture_number = s1->input_picture_number;

    memcpy(s->picture, s1->picture, s1->picture_count * sizeof(Picture));
    memcpy(&s->last_picture, &s1->last_picture,
           (char *) &s1->last_picture_ptr - (char *) &s1->last_picture);

    // reset s->picture[].f.extended_data to s->picture[].f.data
    for (i = 0; i < s->picture_count; i++)
        s->picture[i].f.extended_data = s->picture[i].f.data;

    s->last_picture_ptr    = REBASE_PICTURE(s1->last_picture_ptr,    s, s1);
    s->current_picture_ptr = REBASE_PICTURE(s1->current_picture_ptr, s, s1);
    s->next_picture_ptr    = REBASE_PICTURE(s1->next_picture_ptr,    s, s1);

    // Error/bug resilience
    s->next_p_frame_damaged = s1->next_p_frame_damaged;
    s->workaround_bugs      = s1->workaround_bugs;

    // MPEG4 timing info
    memcpy(&s->time_increment_bits, &s1->time_increment_bits,
           (char *) &s1->shape - (char *) &s1->time_increment_bits);

    // B-frame info
    s->max_b_frames = s1->max_b_frames;
    s->low_delay    = s1->low_delay;
    s->droppable    = s1->droppable;

    // DivX handling (doesn't work)
    s->divx_packed  = s1->divx_packed;

    if (s1->bitstream_buffer) {
        if (s1->bitstream_buffer_size +
            FF_INPUT_BUFFER_PADDING_SIZE > s->allocated_bitstream_buffer_size)
            av_fast_malloc(&s->bitstream_buffer,
                           &s->allocated_bitstream_buffer_size,
                           s1->allocated_bitstream_buffer_size);
        s->bitstream_buffer_size = s1->bitstream_buffer_size;
        memcpy(s->bitstream_buffer, s1->bitstream_buffer,
               s1->bitstream_buffer_size);
        memset(s->bitstream_buffer + s->bitstream_buffer_size, 0,
               FF_INPUT_BUFFER_PADDING_SIZE);
    }

    // linesize-dependent scratch buffer allocation
    if (!s->edge_emu_buffer)
        if (s1->linesize) {
            if (ff_mpv_frame_size_alloc(s, s1->linesize) < 0) {
                av_log(s->avctx, AV_LOG_ERROR, "Failed to allocate context "
                       "scratch buffers.\n");
                return AVERROR(ENOMEM);
            }
        } else {
            av_log(s->avctx, AV_LOG_ERROR, "Context scratch buffers could not "
                   "be allocated due to unknown size.\n");
            return AVERROR_BUG;
        }

    // MPEG2/interlacing info
    memcpy(&s->progressive_sequence, &s1->progressive_sequence,
           (char *) &s1->rtp_mode - (char *) &s1->progressive_sequence);

    if (!s1->first_field) {
        s->last_pict_type = s1->pict_type;
        if (s1->current_picture_ptr)
            s->last_lambda_for[s1->pict_type] = s1->current_picture_ptr->f.quality;

        if (s1->pict_type != AV_PICTURE_TYPE_B) {
            s->last_non_b_pict_type = s1->pict_type;
        }
    }

    return 0;
}

/**
 * Set the given MpegEncContext to common defaults
 * (same for encoding and decoding).
 * The changed fields will not depend upon the
 * prior state of the MpegEncContext.
 */
void ff_MPV_common_defaults(MpegEncContext *s)
{
    s->y_dc_scale_table      =
    s->c_dc_scale_table      = ff_mpeg1_dc_scale_table;
    s->chroma_qscale_table   = ff_default_chroma_qscale_table;
    s->progressive_frame     = 1;
    s->progressive_sequence  = 1;
    s->picture_structure     = PICT_FRAME;

    s->coded_picture_number  = 0;
    s->picture_number        = 0;
    s->input_picture_number  = 0;

    s->picture_in_gop_number = 0;

    s->f_code                = 1;
    s->b_code                = 1;

    s->picture_range_start   = 0;
    s->picture_range_end     = MAX_PICTURE_COUNT;

    s->slice_context_count   = 1;
}

/**
 * Set the given MpegEncContext to defaults for decoding.
 * The changed fields will not depend upon
 * the prior state of the MpegEncContext.
 */
void ff_MPV_decode_defaults(MpegEncContext *s)
{
    ff_MPV_common_defaults(s);
}

AK
736static int init_er(MpegEncContext *s)
737{
738 ERContext *er = &s->er;
739 int mb_array_size = s->mb_height * s->mb_stride;
740 int i;
741
742 er->avctx = s->avctx;
743 er->dsp = &s->dsp;
744
745 er->mb_index2xy = s->mb_index2xy;
746 er->mb_num = s->mb_num;
747 er->mb_width = s->mb_width;
748 er->mb_height = s->mb_height;
749 er->mb_stride = s->mb_stride;
750 er->b8_stride = s->b8_stride;
751
752 er->er_temp_buffer = av_malloc(s->mb_height * s->mb_stride);
753 er->error_status_table = av_mallocz(mb_array_size);
754 if (!er->er_temp_buffer || !er->error_status_table)
755 goto fail;
756
757 er->mbskip_table = s->mbskip_table;
758 er->mbintra_table = s->mbintra_table;
759
760 for (i = 0; i < FF_ARRAY_ELEMS(s->dc_val); i++)
761 er->dc_val[i] = s->dc_val[i];
762
763 er->decode_mb = mpeg_er_decode_mb;
764 er->opaque = s;
765
766 return 0;
767fail:
768 av_freep(&er->er_temp_buffer);
769 av_freep(&er->error_status_table);
770 return AVERROR(ENOMEM);
771}
772
3edcacde 773/**
1b3439b3
JG
774 * Initialize and allocates MpegEncContext fields dependent on the resolution.
775 */
776static int init_context_frame(MpegEncContext *s)
777{
778 int y_size, c_size, yc_size, i, mb_array_size, mv_table_size, x, y;
779
780 s->mb_width = (s->width + 15) / 16;
781 s->mb_stride = s->mb_width + 1;
782 s->b8_stride = s->mb_width * 2 + 1;
783 s->b4_stride = s->mb_width * 4 + 1;
784 mb_array_size = s->mb_height * s->mb_stride;
785 mv_table_size = (s->mb_height + 2) * s->mb_stride + 1;
786
787 /* set default edge pos, will be overriden
788 * in decode_header if needed */
789 s->h_edge_pos = s->mb_width * 16;
790 s->v_edge_pos = s->mb_height * 16;
791
792 s->mb_num = s->mb_width * s->mb_height;
793
794 s->block_wrap[0] =
795 s->block_wrap[1] =
796 s->block_wrap[2] =
797 s->block_wrap[3] = s->b8_stride;
798 s->block_wrap[4] =
799 s->block_wrap[5] = s->mb_stride;
800
801 y_size = s->b8_stride * (2 * s->mb_height + 1);
802 c_size = s->mb_stride * (s->mb_height + 1);
803 yc_size = y_size + 2 * c_size;
804
805 FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_index2xy, (s->mb_num + 1) * sizeof(int),
806 fail); // error ressilience code looks cleaner with this
807 for (y = 0; y < s->mb_height; y++)
808 for (x = 0; x < s->mb_width; x++)
809 s->mb_index2xy[x + y * s->mb_width] = x + y * s->mb_stride;
810
811 s->mb_index2xy[s->mb_height * s->mb_width] =
812 (s->mb_height - 1) * s->mb_stride + s->mb_width; // FIXME really needed?
813
814 if (s->encoding) {
815 /* Allocate MV tables */
816 FF_ALLOCZ_OR_GOTO(s->avctx, s->p_mv_table_base,
817 mv_table_size * 2 * sizeof(int16_t), fail);
818 FF_ALLOCZ_OR_GOTO(s->avctx, s->b_forw_mv_table_base,
819 mv_table_size * 2 * sizeof(int16_t), fail);
820 FF_ALLOCZ_OR_GOTO(s->avctx, s->b_back_mv_table_base,
821 mv_table_size * 2 * sizeof(int16_t), fail);
822 FF_ALLOCZ_OR_GOTO(s->avctx, s->b_bidir_forw_mv_table_base,
823 mv_table_size * 2 * sizeof(int16_t), fail);
824 FF_ALLOCZ_OR_GOTO(s->avctx, s->b_bidir_back_mv_table_base,
825 mv_table_size * 2 * sizeof(int16_t), fail);
826 FF_ALLOCZ_OR_GOTO(s->avctx, s->b_direct_mv_table_base,
827 mv_table_size * 2 * sizeof(int16_t), fail);
828 s->p_mv_table = s->p_mv_table_base + s->mb_stride + 1;
829 s->b_forw_mv_table = s->b_forw_mv_table_base + s->mb_stride + 1;
830 s->b_back_mv_table = s->b_back_mv_table_base + s->mb_stride + 1;
831 s->b_bidir_forw_mv_table = s->b_bidir_forw_mv_table_base +
832 s->mb_stride + 1;
833 s->b_bidir_back_mv_table = s->b_bidir_back_mv_table_base +
834 s->mb_stride + 1;
835 s->b_direct_mv_table = s->b_direct_mv_table_base + s->mb_stride + 1;
836
837 /* Allocate MB type table */
838 FF_ALLOCZ_OR_GOTO(s->avctx, s->mb_type, mb_array_size *
839 sizeof(uint16_t), fail); // needed for encoding
840
841 FF_ALLOCZ_OR_GOTO(s->avctx, s->lambda_table, mb_array_size *
842 sizeof(int), fail);
843
844 FF_ALLOC_OR_GOTO(s->avctx, s->cplx_tab,
845 mb_array_size * sizeof(float), fail);
846 FF_ALLOC_OR_GOTO(s->avctx, s->bits_tab,
847 mb_array_size * sizeof(float), fail);
848
849 }
850
1b3439b3
JG
851 if (s->codec_id == AV_CODEC_ID_MPEG4 ||
852 (s->flags & CODEC_FLAG_INTERLACED_ME)) {
853 /* interlaced direct mode decoding tables */
854 for (i = 0; i < 2; i++) {
855 int j, k;
856 for (j = 0; j < 2; j++) {
857 for (k = 0; k < 2; k++) {
858 FF_ALLOCZ_OR_GOTO(s->avctx,
859 s->b_field_mv_table_base[i][j][k],
860 mv_table_size * 2 * sizeof(int16_t),
861 fail);
862 s->b_field_mv_table[i][j][k] = s->b_field_mv_table_base[i][j][k] +
863 s->mb_stride + 1;
864 }
865 FF_ALLOCZ_OR_GOTO(s->avctx, s->b_field_select_table [i][j],
866 mb_array_size * 2 * sizeof(uint8_t), fail);
867 FF_ALLOCZ_OR_GOTO(s->avctx, s->p_field_mv_table_base[i][j],
868 mv_table_size * 2 * sizeof(int16_t), fail);
869 s->p_field_mv_table[i][j] = s->p_field_mv_table_base[i][j]
870 + s->mb_stride + 1;
871 }
872 FF_ALLOCZ_OR_GOTO(s->avctx, s->p_field_select_table[i],
873 mb_array_size * 2 * sizeof(uint8_t), fail);
874 }
875 }
876 if (s->out_format == FMT_H263) {
877 /* cbp values */
878 FF_ALLOCZ_OR_GOTO(s->avctx, s->coded_block_base, y_size, fail);
879 s->coded_block = s->coded_block_base + s->b8_stride + 1;
880
881 /* cbp, ac_pred, pred_dir */
882 FF_ALLOCZ_OR_GOTO(s->avctx, s->cbp_table,
883 mb_array_size * sizeof(uint8_t), fail);
884 FF_ALLOCZ_OR_GOTO(s->avctx, s->pred_dir_table,
885 mb_array_size * sizeof(uint8_t), fail);
886 }
887
888 if (s->h263_pred || s->h263_plus || !s->encoding) {
889 /* dc values */
890 // MN: we need these for error resilience of intra-frames
891 FF_ALLOCZ_OR_GOTO(s->avctx, s->dc_val_base,
892 yc_size * sizeof(int16_t), fail);
893 s->dc_val[0] = s->dc_val_base + s->b8_stride + 1;
894 s->dc_val[1] = s->dc_val_base + y_size + s->mb_stride + 1;
895 s->dc_val[2] = s->dc_val[1] + c_size;
896 for (i = 0; i < yc_size; i++)
897 s->dc_val_base[i] = 1024;
898 }
899
900 /* which mb is a intra block */
901 FF_ALLOCZ_OR_GOTO(s->avctx, s->mbintra_table, mb_array_size, fail);
902 memset(s->mbintra_table, 1, mb_array_size);
903
904 /* init macroblock skip table */
905 FF_ALLOCZ_OR_GOTO(s->avctx, s->mbskip_table, mb_array_size + 2, fail);
906 // Note the + 1 is for a quicker mpeg4 slice_end detection
907
908 if ((s->avctx->debug & (FF_DEBUG_VIS_QP | FF_DEBUG_VIS_MB_TYPE)) ||
909 s->avctx->debug_mv) {
910 s->visualization_buffer[0] = av_malloc((s->mb_width * 16 +
911 2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
912 s->visualization_buffer[1] = av_malloc((s->mb_width * 16 +
913 2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
914 s->visualization_buffer[2] = av_malloc((s->mb_width * 16 +
915 2 * EDGE_WIDTH) * s->mb_height * 16 + 2 * EDGE_WIDTH);
916 }
917
54974c62 918 return init_er(s);
1b3439b3
JG
919fail:
920 return AVERROR(ENOMEM);
921}
922
923/**
3edcacde
MN
924 * init common structure for both encoder and decoder.
925 * this assumes that some variables like width/height are already set
926 */
efd29844 927av_cold int ff_MPV_common_init(MpegEncContext *s)
defdfc9a 928{
7e76fc52 929 int i;
881a5e04
JG
930 int nb_slices = (HAVE_THREADS &&
931 s->avctx->active_thread_type & FF_THREAD_SLICE) ?
932 s->avctx->thread_count : 1;
933
934 if (s->encoding && s->avctx->slices)
935 nb_slices = s->avctx->slices;
defdfc9a 936
36ef5369 937 if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO && !s->progressive_sequence)
0127b861 938 s->mb_height = (s->height + 31) / 32 * 2;
36ef5369 939 else if (s->codec_id != AV_CODEC_ID_H264)
1b661802 940 s->mb_height = (s->height + 15) / 16;
fdb52bcc 941
716d413c 942 if (s->avctx->pix_fmt == AV_PIX_FMT_NONE) {
4d9ec050 943 av_log(s->avctx, AV_LOG_ERROR,
716d413c 944 "decoding to AV_PIX_FMT_NONE is not supported.\n");
9cfc1b3a
IK
945 return -1;
946 }
947
881a5e04
JG
948 if (nb_slices > MAX_THREADS || (nb_slices > s->mb_height && s->mb_height)) {
949 int max_slices;
950 if (s->mb_height)
951 max_slices = FFMIN(MAX_THREADS, s->mb_height);
952 else
953 max_slices = MAX_THREADS;
954 av_log(s->avctx, AV_LOG_WARNING, "too many threads/slices (%d),"
955 " reducing to %d\n", nb_slices, max_slices);
956 nb_slices = max_slices;
000a9c02
MN
957 }
958
4d9ec050
KT
959 if ((s->width || s->height) &&
960 av_image_check_size(s->width, s->height, 0, s->avctx))
0ecca7a4
MN
961 return -1;
962
6180ade7 963 ff_dct_common_init(s);
eb4b3dd3 964
4d9ec050
KT
965 s->flags = s->avctx->flags;
966 s->flags2 = s->avctx->flags2;
defdfc9a 967
fb22c237 968 if (s->width && s->height) {
d969e93a 969 /* set chroma shifts */
c1a02e88
LB
970 av_pix_fmt_get_chroma_sub_sample(s->avctx->pix_fmt,
971 &s->chroma_x_shift,
972 &s->chroma_y_shift);
eb4b3dd3 973
d969e93a 974 /* convert fourcc to upper case */
4d9ec050 975 s->codec_tag = avpriv_toupper4(s->avctx->codec_tag);
115329f1 976
4d9ec050 977 s->stream_codec_tag = avpriv_toupper4(s->avctx->stream_codec_tag);
603a5f04 978
47c0ac96 979 s->avctx->coded_frame = &s->current_picture.f;
541ae140 980
d969e93a 981 if (s->encoding) {
4d9ec050
KT
982 if (s->msmpeg4_version) {
983 FF_ALLOCZ_OR_GOTO(s->avctx, s->ac_stats,
984 2 * 2 * (MAX_LEVEL + 1) *
985 (MAX_RUN + 1) * 2 * sizeof(int), fail);
d969e93a
RB
986 }
987 FF_ALLOCZ_OR_GOTO(s->avctx, s->avctx->stats_out, 256, fail);
7bc9090a 988
4d9ec050
KT
989 FF_ALLOCZ_OR_GOTO(s->avctx, s->q_intra_matrix,
990 64 * 32 * sizeof(int), fail);
991 FF_ALLOCZ_OR_GOTO(s->avctx, s->q_inter_matrix,
992 64 * 32 * sizeof(int), fail);
993 FF_ALLOCZ_OR_GOTO(s->avctx, s->q_intra_matrix16,
994 64 * 32 * 2 * sizeof(uint16_t), fail);
995 FF_ALLOCZ_OR_GOTO(s->avctx, s->q_inter_matrix16,
996 64 * 32 * 2 * sizeof(uint16_t), fail);
997 FF_ALLOCZ_OR_GOTO(s->avctx, s->input_picture,
998 MAX_PICTURE_COUNT * sizeof(Picture *), fail);
999 FF_ALLOCZ_OR_GOTO(s->avctx, s->reordered_input_picture,
1000 MAX_PICTURE_COUNT * sizeof(Picture *), fail);
1001
1002 if (s->avctx->noise_reduction) {
1003 FF_ALLOCZ_OR_GOTO(s->avctx, s->dct_offset,
1004 2 * 64 * sizeof(uint16_t), fail);
d969e93a 1005 }
821cb11f 1006 }
37fbfd0a 1007 }
fb22c237 1008
6a9c8594 1009 s->picture_count = MAX_PICTURE_COUNT * FFMAX(1, s->avctx->thread_count);
4d9ec050
KT
1010 FF_ALLOCZ_OR_GOTO(s->avctx, s->picture,
1011 s->picture_count * sizeof(Picture), fail);
1012 for (i = 0; i < s->picture_count; i++) {
47c0ac96 1013 avcodec_get_frame_defaults(&s->picture[i].f);
747069e2 1014 }
b465449e 1015
fb22c237 1016 if (s->width && s->height) {
7e76fc52 1017 if (init_context_frame(s))
1b3439b3 1018 goto fail;
4d9ec050
KT
1019
1020 s->parse_context.state = -1;
fb22c237 1021 }
d7425f59 1022
de6d9b64 1023 s->context_initialized = 1;
4d9ec050 1024 s->thread_context[0] = s;
9c3d33d6 1025
fb22c237 1026 if (s->width && s->height) {
881a5e04
JG
1027 if (nb_slices > 1) {
1028 for (i = 1; i < nb_slices; i++) {
4d9ec050
KT
1029 s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
1030 memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
1031 }
9c3d33d6 1032
881a5e04 1033 for (i = 0; i < nb_slices; i++) {
12b54a1f 1034 if (init_duplicate_context(s->thread_context[i]) < 0)
4d9ec050
KT
1035 goto fail;
1036 s->thread_context[i]->start_mb_y =
881a5e04 1037 (s->mb_height * (i) + nb_slices / 2) / nb_slices;
4d9ec050 1038 s->thread_context[i]->end_mb_y =
881a5e04 1039 (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
4d9ec050
KT
1040 }
1041 } else {
12b54a1f 1042 if (init_duplicate_context(s) < 0)
d969e93a 1043 goto fail;
4d9ec050
KT
1044 s->start_mb_y = 0;
1045 s->end_mb_y = s->mb_height;
d969e93a 1046 }
881a5e04 1047 s->slice_context_count = nb_slices;
fb22c237
RB
1048 }
1049
de6d9b64
FB
1050 return 0;
1051 fail:
efd29844 1052 ff_MPV_common_end(s);
de6d9b64
FB
1053 return -1;
1054}
1055
1b3439b3
JG
1056/**
1057 * Frees and resets MpegEncContext fields depending on the resolution.
1058 * Is used during resolution changes to avoid a full reinitialization of the
1059 * codec.
1060 */
1061static int free_context_frame(MpegEncContext *s)
de6d9b64 1062{
bb198e19 1063 int i, j, k;
de6d9b64 1064
6000abfa 1065 av_freep(&s->mb_type);
7bc9090a
MN
1066 av_freep(&s->p_mv_table_base);
1067 av_freep(&s->b_forw_mv_table_base);
1068 av_freep(&s->b_back_mv_table_base);
1069 av_freep(&s->b_bidir_forw_mv_table_base);
1070 av_freep(&s->b_bidir_back_mv_table_base);
1071 av_freep(&s->b_direct_mv_table_base);
4d9ec050
KT
1072 s->p_mv_table = NULL;
1073 s->b_forw_mv_table = NULL;
1074 s->b_back_mv_table = NULL;
1075 s->b_bidir_forw_mv_table = NULL;
1076 s->b_bidir_back_mv_table = NULL;
1077 s->b_direct_mv_table = NULL;
1078 for (i = 0; i < 2; i++) {
1079 for (j = 0; j < 2; j++) {
1080 for (k = 0; k < 2; k++) {
bb198e19 1081 av_freep(&s->b_field_mv_table_base[i][j][k]);
4d9ec050 1082 s->b_field_mv_table[i][j][k] = NULL;
bb198e19
MN
1083 }
1084 av_freep(&s->b_field_select_table[i][j]);
1085 av_freep(&s->p_field_mv_table_base[i][j]);
4d9ec050 1086 s->p_field_mv_table[i][j] = NULL;
bb198e19
MN
1087 }
1088 av_freep(&s->p_field_select_table[i]);
1089 }
115329f1 1090
137c8468 1091 av_freep(&s->dc_val_base);
137c8468 1092 av_freep(&s->coded_block_base);
6000abfa 1093 av_freep(&s->mbintra_table);
7f2fe444
MN
1094 av_freep(&s->cbp_table);
1095 av_freep(&s->pred_dir_table);
115329f1 1096
6000abfa 1097 av_freep(&s->mbskip_table);
0ecca7a4 1098
54974c62
AK
1099 av_freep(&s->er.error_status_table);
1100 av_freep(&s->er.er_temp_buffer);
7bc9090a 1101 av_freep(&s->mb_index2xy);
158c7f05 1102 av_freep(&s->lambda_table);
1b3439b3
JG
1103 av_freep(&s->cplx_tab);
1104 av_freep(&s->bits_tab);
1105
1106 s->linesize = s->uvlinesize = 0;
1107
1108 for (i = 0; i < 3; i++)
1109 av_freep(&s->visualization_buffer[i]);
1110
1b3439b3
JG
1111 return 0;
1112}
1113
435c0b87
JG
1114int ff_MPV_common_frame_size_change(MpegEncContext *s)
1115{
1116 int i, err = 0;
1117
1118 if (s->slice_context_count > 1) {
1119 for (i = 0; i < s->slice_context_count; i++) {
1120 free_duplicate_context(s->thread_context[i]);
1121 }
1122 for (i = 1; i < s->slice_context_count; i++) {
1123 av_freep(&s->thread_context[i]);
1124 }
1125 } else
1126 free_duplicate_context(s);
1127
1128 free_context_frame(s);
1129
1130 if (s->picture)
1131 for (i = 0; i < s->picture_count; i++) {
1132 s->picture[i].needs_realloc = 1;
1133 }
1134
1135 s->last_picture_ptr =
1136 s->next_picture_ptr =
1137 s->current_picture_ptr = NULL;
1138
1139 // init
1140 if (s->codec_id == AV_CODEC_ID_MPEG2VIDEO && !s->progressive_sequence)
1141 s->mb_height = (s->height + 31) / 32 * 2;
1142 else if (s->codec_id != AV_CODEC_ID_H264)
1143 s->mb_height = (s->height + 15) / 16;
1144
1145 if ((s->width || s->height) &&
1146 av_image_check_size(s->width, s->height, 0, s->avctx))
1147 return AVERROR_INVALIDDATA;
1148
1149 if ((err = init_context_frame(s)))
1150 goto fail;
1151
1152 s->thread_context[0] = s;
1153
1154 if (s->width && s->height) {
1155 int nb_slices = s->slice_context_count;
1156 if (nb_slices > 1) {
1157 for (i = 1; i < nb_slices; i++) {
1158 s->thread_context[i] = av_malloc(sizeof(MpegEncContext));
1159 memcpy(s->thread_context[i], s, sizeof(MpegEncContext));
1160 }
1161
1162 for (i = 0; i < nb_slices; i++) {
12b54a1f 1163 if (init_duplicate_context(s->thread_context[i]) < 0)
435c0b87
JG
1164 goto fail;
1165 s->thread_context[i]->start_mb_y =
1166 (s->mb_height * (i) + nb_slices / 2) / nb_slices;
1167 s->thread_context[i]->end_mb_y =
1168 (s->mb_height * (i + 1) + nb_slices / 2) / nb_slices;
1169 }
1170 } else {
12b54a1f 1171 if (init_duplicate_context(s) < 0)
435c0b87
JG
1172 goto fail;
1173 s->start_mb_y = 0;
1174 s->end_mb_y = s->mb_height;
1175 }
1176 s->slice_context_count = nb_slices;
1177 }
1178
1179 return 0;
1180 fail:
1181 ff_MPV_common_end(s);
1182 return err;
1183}
1184
1b3439b3
JG
1185/* init common structure for both encoder and decoder */
1186void ff_MPV_common_end(MpegEncContext *s)
1187{
1188 int i;
1189
1190 if (s->slice_context_count > 1) {
1191 for (i = 0; i < s->slice_context_count; i++) {
1192 free_duplicate_context(s->thread_context[i]);
1193 }
1194 for (i = 1; i < s->slice_context_count; i++) {
1195 av_freep(&s->thread_context[i]);
1196 }
1197 s->slice_context_count = 1;
1198 } else free_duplicate_context(s);
1199
1200 av_freep(&s->parse_context.buffer);
1201 s->parse_context.buffer_size = 0;
1202
1203 av_freep(&s->bitstream_buffer);
1204 s->allocated_bitstream_buffer_size = 0;
1205
1206 av_freep(&s->avctx->stats_out);
1207 av_freep(&s->ac_stats);
1208
7e4995c3
MN
1209 av_freep(&s->q_intra_matrix);
1210 av_freep(&s->q_inter_matrix);
642ccefb
MN
1211 av_freep(&s->q_intra_matrix16);
1212 av_freep(&s->q_inter_matrix16);
9d9e3172
MN
1213 av_freep(&s->input_picture);
1214 av_freep(&s->reordered_input_picture);
821cb11f 1215 av_freep(&s->dct_offset);
1b3439b3 1216
4d9ec050
KT
1217 if (s->picture && !s->avctx->internal->is_copy) {
1218 for (i = 0; i < s->picture_count; i++) {
9b4b6e09
MN
1219 free_picture(s, &s->picture[i]);
1220 }
de6d9b64 1221 }
b465449e 1222 av_freep(&s->picture);
32c7589b
JG
1223
1224 free_context_frame(s);
1225
6a27ae28
JG
1226 if (!(s->avctx->active_thread_type & FF_THREAD_FRAME))
1227 avcodec_default_free_buffers(s->avctx);
1228
4d9ec050
KT
1229 s->context_initialized = 0;
1230 s->last_picture_ptr =
1231 s->next_picture_ptr =
1232 s->current_picture_ptr = NULL;
1233 s->linesize = s->uvlinesize = 0;
de6d9b64
FB
1234}
1235
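/* Build the max_level[], max_run[] and index_run[] lookup tables for an RL
 * table, either into caller-provided static storage or into freshly
 * allocated buffers. */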
void ff_init_rl(RLTable *rl,
                uint8_t static_store[2][2 * MAX_RUN + MAX_LEVEL + 3])
{
    int8_t  max_level[MAX_RUN + 1], max_run[MAX_LEVEL + 1];
    uint8_t index_run[MAX_RUN + 1];
    int last, run, level, start, end, i;

    /* If table is static, we can quit if rl->max_level[0] is not NULL */
    if (static_store && rl->max_level[0])
        return;

    /* compute max_level[], max_run[] and index_run[] */
    for (last = 0; last < 2; last++) {
        if (last == 0) {
            start = 0;
            end   = rl->last;
        } else {
            start = rl->last;
            end   = rl->n;
        }

        memset(max_level, 0, MAX_RUN + 1);
        memset(max_run, 0, MAX_LEVEL + 1);
        memset(index_run, rl->n, MAX_RUN + 1);
        for (i = start; i < end; i++) {
            run   = rl->table_run[i];
            level = rl->table_level[i];
            if (index_run[run] == rl->n)
                index_run[run] = i;
            if (level > max_level[run])
                max_level[run] = level;
            if (run > max_run[level])
                max_run[level] = run;
        }
        if (static_store)
            rl->max_level[last] = static_store[last];
        else
            rl->max_level[last] = av_malloc(MAX_RUN + 1);
        memcpy(rl->max_level[last], max_level, MAX_RUN + 1);
        if (static_store)
            rl->max_run[last]   = static_store[last] + MAX_RUN + 1;
        else
            rl->max_run[last]   = av_malloc(MAX_LEVEL + 1);
        memcpy(rl->max_run[last], max_run, MAX_LEVEL + 1);
        if (static_store)
            rl->index_run[last] = static_store[last] + MAX_RUN + MAX_LEVEL + 2;
        else
            rl->index_run[last] = av_malloc(MAX_RUN + 1);
        memcpy(rl->index_run[last], index_run, MAX_RUN + 1);
    }
}

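/* Precompute the RL_VLC tables for every qscale value, folding the inverse
 * quantization (qmul/qadd) of the level into the table entries. */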
void ff_init_vlc_rl(RLTable *rl)
{
    int i, q;

    for (q = 0; q < 32; q++) {
        int qmul = q * 2;
        int qadd = (q - 1) | 1;

        if (q == 0) {
            qmul = 1;
            qadd = 0;
        }
        for (i = 0; i < rl->vlc.table_size; i++) {
            int code = rl->vlc.table[i][0];
            int len  = rl->vlc.table[i][1];
            int level, run;

            if (len == 0) { // illegal code
                run   = 66;
                level = MAX_LEVEL;
            } else if (len < 0) { // more bits needed
                run   = 0;
                level = code;
            } else {
                if (code == rl->n) { // esc
                    run   = 66;
                    level = 0;
                } else {
                    run   = rl->table_run[code] + 1;
                    level = rl->table_level[code] * qmul + qadd;
                    if (code >= rl->last) run += 192;
                }
            }
            rl->rl_vlc[q][i].len   = len;
            rl->rl_vlc[q][i].level = level;
            rl->rl_vlc[q][i].run   = run;
        }
    }
}

void ff_release_unused_pictures(MpegEncContext *s, int remove_current)
{
    int i;

    /* release non reference frames */
    for (i = 0; i < s->picture_count; i++) {
        if (s->picture[i].f.data[0] && !s->picture[i].f.reference &&
            (!s->picture[i].owner2 || s->picture[i].owner2 == s) &&
            (remove_current || &s->picture[i] != s->current_picture_ptr)
            /* && s->picture[i].type != FF_BUFFER_TYPE_SHARED */) {
            free_frame_buffer(s, &s->picture[i]);
        }
    }
}

static inline int pic_is_unused(MpegEncContext *s, Picture *pic)
{
    if (pic->f.data[0] == NULL)
        return 1;
    if (pic->needs_realloc && !(pic->f.reference & DELAYED_PIC_REF))
        if (!pic->owner2 || pic->owner2 == s)
            return 1;
    return 0;
}

static int find_unused_picture(MpegEncContext *s, int shared)
{
    int i;

    if (shared) {
        for (i = s->picture_range_start; i < s->picture_range_end; i++) {
            if (s->picture[i].f.data[0] == NULL && s->picture[i].f.type == 0)
                return i;
        }
    } else {
        for (i = s->picture_range_start; i < s->picture_range_end; i++) {
            if (pic_is_unused(s, &s->picture[i]) && s->picture[i].f.type != 0)
                return i; // FIXME
        }
        for (i = s->picture_range_start; i < s->picture_range_end; i++) {
            if (pic_is_unused(s, &s->picture[i]))
                return i;
        }
    }

    return AVERROR_INVALIDDATA;
}

int ff_find_unused_picture(MpegEncContext *s, int shared)
{
    int ret = find_unused_picture(s, shared);

    if (ret >= 0 && ret < s->picture_range_end) {
        if (s->picture[ret].needs_realloc) {
            s->picture[ret].needs_realloc = 0;
            free_picture(s, &s->picture[ret]);
            avcodec_get_frame_defaults(&s->picture[ret].f);
        }
    }
    return ret;
}

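/* Update the adaptive noise-reduction DCT offsets from the running error
 * statistics; the accumulators are halved once dct_count exceeds 1 << 16 so
 * the statistics stay bounded and recent frames weigh more. */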
c65dfac4
KT
1390static void update_noise_reduction(MpegEncContext *s)
1391{
821cb11f
MN
1392 int intra, i;
1393
c65dfac4
KT
1394 for (intra = 0; intra < 2; intra++) {
1395 if (s->dct_count[intra] > (1 << 16)) {
1396 for (i = 0; i < 64; i++) {
1397 s->dct_error_sum[intra][i] >>= 1;
821cb11f
MN
1398 }
1399 s->dct_count[intra] >>= 1;
1400 }
115329f1 1401
c65dfac4
KT
1402 for (i = 0; i < 64; i++) {
1403 s->dct_offset[intra][i] = (s->avctx->noise_reduction *
1404 s->dct_count[intra] +
1405 s->dct_error_sum[intra][i] / 2) /
1406 (s->dct_error_sum[intra][i] + 1);
821cb11f
MN
1407 }
1408 }
1409}
1410
5f194811 1411/**
c65dfac4
KT
1412 * generic function for encode/decode called after coding/decoding
1413 * the header and before a frame is coded/decoded.
5f194811 1414 */
efd29844 1415int ff_MPV_frame_start(MpegEncContext *s, AVCodecContext *avctx)
de6d9b64 1416{
4e00e76b 1417 int i;
357ec71f 1418 Picture *pic;
160d679c 1419 s->mb_skipped = 0;
0da71265 1420
c65dfac4 1421 /* mark & release old frames */
36ef5369 1422 if (s->out_format != FMT_H264 || s->codec_id == AV_CODEC_ID_SVQ3) {
3ab77000
JG
1423 if (s->pict_type != AV_PICTURE_TYPE_B && s->last_picture_ptr &&
1424 s->last_picture_ptr != s->next_picture_ptr &&
1425 s->last_picture_ptr->f.data[0]) {
c65dfac4
KT
1426 if (s->last_picture_ptr->owner2 == s)
1427 free_frame_buffer(s, s->last_picture_ptr);
3ab77000 1428 }
c65dfac4 1429
3ab77000
JG
1430 /* release forgotten pictures */
1431 /* if (mpeg124/h263) */
1432 if (!s->encoding) {
1433 for (i = 0; i < s->picture_count; i++) {
1434 if (s->picture[i].owner2 == s && s->picture[i].f.data[0] &&
1435 &s->picture[i] != s->last_picture_ptr &&
1436 &s->picture[i] != s->next_picture_ptr &&
435c0b87 1437 s->picture[i].f.reference && !s->picture[i].needs_realloc) {
3ab77000
JG
1438 if (!(avctx->active_thread_type & FF_THREAD_FRAME))
1439 av_log(avctx, AV_LOG_ERROR,
1440 "releasing zombie picture\n");
1441 free_frame_buffer(s, &s->picture[i]);
1e491e29
MN
1442 }
1443 }
d6db1c9c 1444 }
93a21abd 1445 }
d52b4abe 1446
c65dfac4 1447 if (!s->encoding) {
6a9c8594 1448 ff_release_unused_pictures(s, 1);
e20c4069 1449
c65dfac4
KT
1450 if (s->current_picture_ptr &&
1451 s->current_picture_ptr->f.data[0] == NULL) {
1452 // we already have a unused image
1453 // (maybe it was set before reading the header)
1454 pic = s->current_picture_ptr;
1455 } else {
1456 i = ff_find_unused_picture(s, 0);
01fc5d66
JG
1457 if (i < 0) {
1458 av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1459 return i;
1460 }
c65dfac4 1461 pic = &s->picture[i];
5f194811
MN
1462 }
1463
657ccb5a 1464 pic->f.reference = 0;
ba0c8981 1465 if (!s->droppable) {
36ef5369 1466 if (s->codec_id == AV_CODEC_ID_H264)
657ccb5a 1467 pic->f.reference = s->picture_structure;
975a1447 1468 else if (s->pict_type != AV_PICTURE_TYPE_B)
657ccb5a 1469 pic->f.reference = 3;
2ddcf84b 1470 }
b536d0aa 1471
657ccb5a 1472 pic->f.coded_picture_number = s->coded_picture_number++;
115329f1 1473
c65dfac4 1474 if (ff_alloc_picture(s, pic, 0) < 0)
f23a68df 1475 return -1;
93a21abd 1476
c65dfac4
KT
1477 s->current_picture_ptr = pic;
1478 // FIXME use only the vars from current_pic
657ccb5a 1479 s->current_picture_ptr->f.top_field_first = s->top_field_first;
36ef5369
AK
1480 if (s->codec_id == AV_CODEC_ID_MPEG1VIDEO ||
1481 s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
c65dfac4
KT
1482 if (s->picture_structure != PICT_FRAME)
1483 s->current_picture_ptr->f.top_field_first =
1484 (s->picture_structure == PICT_TOP_FIELD) == s->first_field;
52654005 1485 }
c65dfac4
KT
1486 s->current_picture_ptr->f.interlaced_frame = !s->progressive_frame &&
1487 !s->progressive_sequence;
1488 s->current_picture_ptr->field_picture = s->picture_structure != PICT_FRAME;
1e491e29 1489 }
b7adc711 1490
657ccb5a 1491 s->current_picture_ptr->f.pict_type = s->pict_type;
c65dfac4
KT
1492 // if (s->flags && CODEC_FLAG_QSCALE)
1493 // s->current_picture_ptr->quality = s->new_picture_ptr->quality;
657ccb5a 1494 s->current_picture_ptr->f.key_frame = s->pict_type == AV_PICTURE_TYPE_I;
9f2e61b6 1495
8d2fc163 1496 ff_copy_picture(&s->current_picture, s->current_picture_ptr);
115329f1 1497
975a1447 1498 if (s->pict_type != AV_PICTURE_TYPE_B) {
c65dfac4 1499 s->last_picture_ptr = s->next_picture_ptr;
ba0c8981 1500 if (!s->droppable)
c65dfac4 1501 s->next_picture_ptr = s->current_picture_ptr;
de6d9b64 1502 }
1218777f
DB
1503 av_dlog(s->avctx, "L%p N%p C%p L%p N%p C%p type:%d drop:%d\n",
1504 s->last_picture_ptr, s->next_picture_ptr,s->current_picture_ptr,
1505 s->last_picture_ptr ? s->last_picture_ptr->f.data[0] : NULL,
1506 s->next_picture_ptr ? s->next_picture_ptr->f.data[0] : NULL,
1507 s->current_picture_ptr ? s->current_picture_ptr->f.data[0] : NULL,
ba0c8981 1508 s->pict_type, s->droppable);
c65dfac4 1509
36ef5369 1510 if (s->codec_id != AV_CODEC_ID_H264) {
c65dfac4
KT
1511 if ((s->last_picture_ptr == NULL ||
1512 s->last_picture_ptr->f.data[0] == NULL) &&
1513 (s->pict_type != AV_PICTURE_TYPE_I ||
1514 s->picture_structure != PICT_FRAME)) {
293065bd
AK
1515 int h_chroma_shift, v_chroma_shift;
1516 av_pix_fmt_get_chroma_sub_sample(s->avctx->pix_fmt,
1517 &h_chroma_shift, &v_chroma_shift);
b44c8ad2 1518 if (s->pict_type != AV_PICTURE_TYPE_I)
c65dfac4
KT
1519 av_log(avctx, AV_LOG_ERROR,
1520 "warning: first frame is no keyframe\n");
b44c8ad2 1521 else if (s->picture_structure != PICT_FRAME)
c65dfac4
KT
1522 av_log(avctx, AV_LOG_INFO,
1523 "allocate dummy last picture for field based first keyframe\n");
b44c8ad2 1524
d52b4abe 1525 /* Allocate a dummy frame */
c65dfac4 1526 i = ff_find_unused_picture(s, 0);
01fc5d66
JG
1527 if (i < 0) {
1528 av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1529 return i;
1530 }
c65dfac4 1531 s->last_picture_ptr = &s->picture[i];
5ab506a5
JG
1532 if (ff_alloc_picture(s, s->last_picture_ptr, 0) < 0) {
1533 s->last_picture_ptr = NULL;
d52b4abe 1534 return -1;
5ab506a5 1535 }
293065bd
AK
1536
1537 memset(s->last_picture_ptr->f.data[0], 0,
1538 avctx->height * s->last_picture_ptr->f.linesize[0]);
1539 memset(s->last_picture_ptr->f.data[1], 0x80,
1540 (avctx->height >> v_chroma_shift) *
1541 s->last_picture_ptr->f.linesize[1]);
1542 memset(s->last_picture_ptr->f.data[2], 0x80,
1543 (avctx->height >> v_chroma_shift) *
1544 s->last_picture_ptr->f.linesize[2]);
1545
47c0ac96
DB
1546 ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 0);
1547 ff_thread_report_progress(&s->last_picture_ptr->f, INT_MAX, 1);
7b60799a 1548 s->last_picture_ptr->f.reference = 3;
d52b4abe 1549 }
c65dfac4
KT
1550 if ((s->next_picture_ptr == NULL ||
1551 s->next_picture_ptr->f.data[0] == NULL) &&
1552 s->pict_type == AV_PICTURE_TYPE_B) {
d52b4abe 1553 /* Allocate a dummy frame */
c65dfac4 1554 i = ff_find_unused_picture(s, 0);
01fc5d66
JG
1555 if (i < 0) {
1556 av_log(s->avctx, AV_LOG_ERROR, "no frame buffer available\n");
1557 return i;
1558 }
c65dfac4 1559 s->next_picture_ptr = &s->picture[i];
5ab506a5
JG
1560 if (ff_alloc_picture(s, s->next_picture_ptr, 0) < 0) {
1561 s->next_picture_ptr = NULL;
d52b4abe 1562 return -1;
5ab506a5 1563 }
47c0ac96
DB
1564 ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 0);
1565 ff_thread_report_progress(&s->next_picture_ptr->f, INT_MAX, 1);
7b60799a 1566 s->next_picture_ptr->f.reference = 3;
d52b4abe
MN
1567 }
1568 }
1569
c65dfac4
KT
1570 if (s->last_picture_ptr)
1571 ff_copy_picture(&s->last_picture, s->last_picture_ptr);
1572 if (s->next_picture_ptr)
1573 ff_copy_picture(&s->next_picture, s->next_picture_ptr);
115329f1 1574
d5e83122 1575 if (HAVE_THREADS && (avctx->active_thread_type & FF_THREAD_FRAME)) {
3ab77000
JG
1576 if (s->next_picture_ptr)
1577 s->next_picture_ptr->owner2 = s;
1578 if (s->last_picture_ptr)
1579 s->last_picture_ptr->owner2 = s;
1580 }
1581
c65dfac4
KT
1582 assert(s->pict_type == AV_PICTURE_TYPE_I || (s->last_picture_ptr &&
1583 s->last_picture_ptr->f.data[0]));
ffba1dc0 1584
c65dfac4 1585 if (s->picture_structure!= PICT_FRAME && s->out_format != FMT_H264) {
b536d0aa 1586 int i;
c65dfac4
KT
1587 for (i = 0; i < 4; i++) {
1588 if (s->picture_structure == PICT_BOTTOM_FIELD) {
1589 s->current_picture.f.data[i] +=
1590 s->current_picture.f.linesize[i];
115329f1 1591 }
657ccb5a
DB
1592 s->current_picture.f.linesize[i] *= 2;
1593 s->last_picture.f.linesize[i] *= 2;
1594 s->next_picture.f.linesize[i] *= 2;
b536d0aa
MN
1595 }
1596 }
115329f1 1597
5b22d6e1 1598 s->err_recognition = avctx->err_recognition;
aa388dba 1599
c65dfac4
KT
 1600 /* Set the dequantizer; we cannot do it during init, as it might
 1601 * change for MPEG-4, and we cannot do it in the header decode, as
 1602 * init is not called for MPEG-4 there yet. */
36ef5369 1603 if (s->mpeg_quant || s->codec_id == AV_CODEC_ID_MPEG2VIDEO) {
d50635cd
MN
1604 s->dct_unquantize_intra = s->dct_unquantize_mpeg2_intra;
1605 s->dct_unquantize_inter = s->dct_unquantize_mpeg2_inter;
c65dfac4 1606 } else if (s->out_format == FMT_H263 || s->out_format == FMT_H261) {
d50635cd
MN
1607 s->dct_unquantize_intra = s->dct_unquantize_h263_intra;
1608 s->dct_unquantize_inter = s->dct_unquantize_h263_inter;
c65dfac4 1609 } else {
d50635cd
MN
1610 s->dct_unquantize_intra = s->dct_unquantize_mpeg1_intra;
1611 s->dct_unquantize_inter = s->dct_unquantize_mpeg1_inter;
1612 }
d6db1c9c 1613
c65dfac4 1614 if (s->dct_error_sum) {
821cb11f 1615 assert(s->avctx->noise_reduction && s->encoding);
821cb11f
MN
1616 update_noise_reduction(s);
1617 }
115329f1 1618
c65dfac4 1619 if (CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration)
a002da79 1620 return ff_xvmc_field_start(s, avctx);
83344066 1621
d6db1c9c 1622 return 0;
de6d9b64 1623}
21af69f7 1624
c65dfac4
KT
1625/* generic function for encode/decode called after a
1626 * frame has been coded/decoded. */
efd29844 1627void ff_MPV_frame_end(MpegEncContext *s)
de6d9b64 1628{
1e491e29 1629 int i;
6a9c8594 1630 /* redraw edges for the frame if decoding didn't complete */
c65dfac4
KT
1631 // just to make sure that all data is rendered.
1632 if (CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration) {
a002da79 1633 ff_xvmc_field_end(s);
54974c62 1634 } else if ((s->er.error_count || s->encoding) &&
c65dfac4
KT
1635 !s->avctx->hwaccel &&
1636 !(s->avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) &&
1637 s->unrestricted_mv &&
1638 s->current_picture.f.reference &&
1639 !s->intra_only &&
1640 !(s->flags & CODEC_FLAG_EMU_EDGE)) {
50ba57e0
AK
1641 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(s->avctx->pix_fmt);
1642 int hshift = desc->log2_chroma_w;
1643 int vshift = desc->log2_chroma_h;
9953ff3c
AK
1644 s->dsp.draw_edges(s->current_picture.f.data[0], s->linesize,
1645 s->h_edge_pos, s->v_edge_pos,
1646 EDGE_WIDTH, EDGE_WIDTH,
1647 EDGE_TOP | EDGE_BOTTOM);
1648 s->dsp.draw_edges(s->current_picture.f.data[1], s->uvlinesize,
1649 s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1650 EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1651 EDGE_TOP | EDGE_BOTTOM);
1652 s->dsp.draw_edges(s->current_picture.f.data[2], s->uvlinesize,
1653 s->h_edge_pos >> hshift, s->v_edge_pos >> vshift,
1654 EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift,
1655 EDGE_TOP | EDGE_BOTTOM);
de6d9b64 1656 }
6a9c8594 1657
5975626d 1658 emms_c();
115329f1 1659
c65dfac4
KT
1660 s->last_pict_type = s->pict_type;
1661 s->last_lambda_for [s->pict_type] = s->current_picture_ptr->f.quality;
 1662 if (s->pict_type != AV_PICTURE_TYPE_B) {
1663 s->last_non_b_pict_type = s->pict_type;
8b4c7dbc 1664 }
b536d0aa 1665#if 0
c65dfac4
KT
1666 /* copy back current_picture variables */
1667 for (i = 0; i < MAX_PICTURE_COUNT; i++) {
1668 if (s->picture[i].f.data[0] == s->current_picture.f.data[0]) {
1669 s->picture[i] = s->current_picture;
1e491e29 1670 break;
115329f1 1671 }
1e491e29 1672 }
c65dfac4 1673 assert(i < MAX_PICTURE_COUNT);
115329f1 1674#endif
1e491e29 1675
c65dfac4 1676 if (s->encoding) {
bb628dae 1677 /* release non-reference frames */
c65dfac4
KT
1678 for (i = 0; i < s->picture_count; i++) {
1679 if (s->picture[i].f.data[0] && !s->picture[i].f.reference
1680 /* && s->picture[i].type != FF_BUFFER_TYPE_SHARED */) {
34e46c44 1681 free_frame_buffer(s, &s->picture[i]);
e20c4069
MN
1682 }
1683 }
1e491e29 1684 }
b536d0aa
MN
1685 // clear copies, to avoid confusion
1686#if 0
c65dfac4
KT
1687 memset(&s->last_picture, 0, sizeof(Picture));
1688 memset(&s->next_picture, 0, sizeof(Picture));
b536d0aa
MN
1689 memset(&s->current_picture, 0, sizeof(Picture));
1690#endif
47c0ac96 1691 s->avctx->coded_frame = &s->current_picture_ptr->f;
6a9c8594 1692
36ef5369 1693 if (s->codec_id != AV_CODEC_ID_H264 && s->current_picture.f.reference) {
c6ccb96b 1694 ff_thread_report_progress(&s->current_picture_ptr->f, INT_MAX, 0);
6a9c8594 1695 }
de6d9b64
FB
1696}
1697
7bc9090a 1698/**
58c42af7 1699 * Draw a line from (ex, ey) -> (sx, sy).
db6e7795
MN
1700 * @param w width of the image
1701 * @param h height of the image
1702 * @param stride stride/linesize of the image
 1703 * @param color color of the line
1704 */
c65dfac4
KT
1705static void draw_line(uint8_t *buf, int sx, int sy, int ex, int ey,
1706 int w, int h, int stride, int color)
1707{
67eca72d 1708 int x, y, fr, f;
115329f1 1709
c65dfac4
KT
1710 sx = av_clip(sx, 0, w - 1);
1711 sy = av_clip(sy, 0, h - 1);
1712 ex = av_clip(ex, 0, w - 1);
1713 ey = av_clip(ey, 0, h - 1);
115329f1 1714
c65dfac4 1715 buf[sy * stride + sx] += color;
115329f1 1716
c65dfac4
KT
1717 if (FFABS(ex - sx) > FFABS(ey - sy)) {
1718 if (sx > ex) {
1345f4ed
DB
1719 FFSWAP(int, sx, ex);
1720 FFSWAP(int, sy, ey);
db6e7795 1721 }
c65dfac4
KT
1722 buf += sx + sy * stride;
1723 ex -= sx;
1724 f = ((ey - sy) << 16) / ex;
992b0318 1725 for (x = 0; x <= ex; x++) {
c65dfac4
KT
1726 y = (x * f) >> 16;
1727 fr = (x * f) & 0xFFFF;
1728 buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1729 buf[(y + 1) * stride + x] += (color * fr ) >> 16;
db6e7795 1730 }
c65dfac4
KT
1731 } else {
1732 if (sy > ey) {
1345f4ed
DB
1733 FFSWAP(int, sx, ex);
1734 FFSWAP(int, sy, ey);
db6e7795 1735 }
c65dfac4
KT
1736 buf += sx + sy * stride;
1737 ey -= sy;
1738 if (ey)
1739 f = ((ex - sx) << 16) / ey;
1740 else
1741 f = 0;
 1742 for (y = 0; y <= ey; y++) {
1743 x = (y * f) >> 16;
1744 fr = (y * f) & 0xFFFF;
1745 buf[y * stride + x] += (color * (0x10000 - fr)) >> 16;
1746 buf[y * stride + x + 1] += (color * fr ) >> 16;
db6e7795
MN
1747 }
1748 }
1749}
1750
1751/**
58c42af7 1752 * Draw an arrow from (ex, ey) -> (sx, sy).
db6e7795
MN
1753 * @param w width of the image
1754 * @param h height of the image
1755 * @param stride stride/linesize of the image
1756 * @param color color of the arrow
1757 */
c65dfac4
KT
1758static void draw_arrow(uint8_t *buf, int sx, int sy, int ex,
1759 int ey, int w, int h, int stride, int color)
1760{
db6e7795
MN
1761 int dx,dy;
1762
c65dfac4
KT
1763 sx = av_clip(sx, -100, w + 100);
1764 sy = av_clip(sy, -100, h + 100);
1765 ex = av_clip(ex, -100, w + 100);
1766 ey = av_clip(ey, -100, h + 100);
115329f1 1767
c65dfac4
KT
1768 dx = ex - sx;
1769 dy = ey - sy;
115329f1 1770
c65dfac4
KT
1771 if (dx * dx + dy * dy > 3 * 3) {
1772 int rx = dx + dy;
1773 int ry = -dx + dy;
1774 int length = ff_sqrt((rx * rx + ry * ry) << 8);
115329f1 1775
c65dfac4
KT
1776 // FIXME subpixel accuracy
1777 rx = ROUNDED_DIV(rx * 3 << 4, length);
1778 ry = ROUNDED_DIV(ry * 3 << 4, length);
115329f1 1779
db6e7795
MN
1780 draw_line(buf, sx, sy, sx + rx, sy + ry, w, h, stride, color);
1781 draw_line(buf, sx, sy, sx - ry, sy + rx, w, h, stride, color);
1782 }
1783 draw_line(buf, sx, sy, ex, ey, w, h, stride, color);
1784}
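/* Editor's sketch (illustrative only; buffer, size and coordinates are made
 * up): how draw_line()/draw_arrow() combine when visualizing one motion
 * vector.  The two short head strokes are drawn at (sx, sy), so the arrow
 * head ends up on the block centre. */
#if 0
static void example_visualize_mv(uint8_t *luma, int linesize)
{
    /* block centre at (8, 8), referenced position at (24, 12),
     * inside a 64x64 luma area, blended with weight 100 */
    draw_arrow(luma, 8, 8, 24, 12, 64, 64, linesize, 100);
}
#endif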
1785
1786/**
c65dfac4 1787 * Print debugging info for the given picture.
7bc9090a 1788 */
c65dfac4
KT
1789void ff_print_debug_info(MpegEncContext *s, AVFrame *pict)
1790{
1791 if (s->avctx->hwaccel || !pict || !pict->mb_type)
1792 return;
7bc9090a 1793
c65dfac4 1794 if (s->avctx->debug & (FF_DEBUG_SKIP | FF_DEBUG_QP | FF_DEBUG_MB_TYPE)) {
7bc9090a 1795 int x,y;
115329f1 1796
0c9bbaec
WH
1797 av_log(s->avctx,AV_LOG_DEBUG,"New frame, type: ");
1798 switch (pict->pict_type) {
c65dfac4
KT
1799 case AV_PICTURE_TYPE_I:
1800 av_log(s->avctx,AV_LOG_DEBUG,"I\n");
1801 break;
1802 case AV_PICTURE_TYPE_P:
1803 av_log(s->avctx,AV_LOG_DEBUG,"P\n");
1804 break;
1805 case AV_PICTURE_TYPE_B:
1806 av_log(s->avctx,AV_LOG_DEBUG,"B\n");
1807 break;
1808 case AV_PICTURE_TYPE_S:
1809 av_log(s->avctx,AV_LOG_DEBUG,"S\n");
1810 break;
1811 case AV_PICTURE_TYPE_SI:
1812 av_log(s->avctx,AV_LOG_DEBUG,"SI\n");
1813 break;
1814 case AV_PICTURE_TYPE_SP:
1815 av_log(s->avctx,AV_LOG_DEBUG,"SP\n");
1816 break;
0c9bbaec 1817 }
c65dfac4
KT
1818 for (y = 0; y < s->mb_height; y++) {
1819 for (x = 0; x < s->mb_width; x++) {
1820 if (s->avctx->debug & FF_DEBUG_SKIP) {
1821 int count = s->mbskip_table[x + y * s->mb_stride];
1822 if (count > 9)
1823 count = 9;
9b879566 1824 av_log(s->avctx, AV_LOG_DEBUG, "%1d", count);
7bc9090a 1825 }
c65dfac4
KT
1826 if (s->avctx->debug & FF_DEBUG_QP) {
1827 av_log(s->avctx, AV_LOG_DEBUG, "%2d",
1828 pict->qscale_table[x + y * s->mb_stride]);
7bc9090a 1829 }
c65dfac4
KT
1830 if (s->avctx->debug & FF_DEBUG_MB_TYPE) {
1831 int mb_type = pict->mb_type[x + y * s->mb_stride];
1832 // Type & MV direction
1833 if (IS_PCM(mb_type))
9b879566 1834 av_log(s->avctx, AV_LOG_DEBUG, "P");
c65dfac4 1835 else if (IS_INTRA(mb_type) && IS_ACPRED(mb_type))
9b879566 1836 av_log(s->avctx, AV_LOG_DEBUG, "A");
c65dfac4 1837 else if (IS_INTRA4x4(mb_type))
9b879566 1838 av_log(s->avctx, AV_LOG_DEBUG, "i");
c65dfac4 1839 else if (IS_INTRA16x16(mb_type))
9b879566 1840 av_log(s->avctx, AV_LOG_DEBUG, "I");
c65dfac4 1841 else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type))
9b879566 1842 av_log(s->avctx, AV_LOG_DEBUG, "d");
c65dfac4 1843 else if (IS_DIRECT(mb_type))
9b879566 1844 av_log(s->avctx, AV_LOG_DEBUG, "D");
c65dfac4 1845 else if (IS_GMC(mb_type) && IS_SKIP(mb_type))
9b879566 1846 av_log(s->avctx, AV_LOG_DEBUG, "g");
c65dfac4 1847 else if (IS_GMC(mb_type))
9b879566 1848 av_log(s->avctx, AV_LOG_DEBUG, "G");
c65dfac4 1849 else if (IS_SKIP(mb_type))
9b879566 1850 av_log(s->avctx, AV_LOG_DEBUG, "S");
c65dfac4 1851 else if (!USES_LIST(mb_type, 1))
9b879566 1852 av_log(s->avctx, AV_LOG_DEBUG, ">");
c65dfac4 1853 else if (!USES_LIST(mb_type, 0))
9b879566 1854 av_log(s->avctx, AV_LOG_DEBUG, "<");
c65dfac4 1855 else {
7bc9090a 1856 assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
9b879566 1857 av_log(s->avctx, AV_LOG_DEBUG, "X");
7bc9090a 1858 }
115329f1 1859
c65dfac4
KT
1860 // segmentation
1861 if (IS_8X8(mb_type))
9b879566 1862 av_log(s->avctx, AV_LOG_DEBUG, "+");
c65dfac4 1863 else if (IS_16X8(mb_type))
9b879566 1864 av_log(s->avctx, AV_LOG_DEBUG, "-");
c65dfac4 1865 else if (IS_8X16(mb_type))
30344a83 1866 av_log(s->avctx, AV_LOG_DEBUG, "|");
c65dfac4 1867 else if (IS_INTRA(mb_type) || IS_16X16(mb_type))
9b879566 1868 av_log(s->avctx, AV_LOG_DEBUG, " ");
7bc9090a 1869 else
9b879566 1870 av_log(s->avctx, AV_LOG_DEBUG, "?");
115329f1
DB
1871
1872
c65dfac4 1873 if (IS_INTERLACED(mb_type))
9b879566 1874 av_log(s->avctx, AV_LOG_DEBUG, "=");
7bc9090a 1875 else
9b879566 1876 av_log(s->avctx, AV_LOG_DEBUG, " ");
7bc9090a 1877 }
7bc9090a 1878 }
9b879566 1879 av_log(s->avctx, AV_LOG_DEBUG, "\n");
7bc9090a
MN
1880 }
1881 }
8d7ec294 1882
ee41963f 1883 if ((s->avctx->debug & (FF_DEBUG_VIS_QP | FF_DEBUG_VIS_MB_TYPE)) ||
c65dfac4
KT
1884 (s->avctx->debug_mv)) {
1885 const int shift = 1 + s->quarter_sample;
db6e7795 1886 int mb_y;
0c9bbaec 1887 uint8_t *ptr;
0c9bbaec 1888 int i;
014d2f05 1889 int h_chroma_shift, v_chroma_shift, block_height;
c65dfac4
KT
1890 const int width = s->avctx->width;
1891 const int height = s->avctx->height;
1892 const int mv_sample_log2 = 4 - pict->motion_subsample_log2;
1893 const int mv_stride = (s->mb_width << mv_sample_log2) +
36ef5369 1894 (s->codec_id == AV_CODEC_ID_H264 ? 0 : 1);
c65dfac4
KT
1895 s->low_delay = 0; // needed to see the vectors without trashing the buffers
1896
c1a02e88
LB
1897 av_pix_fmt_get_chroma_sub_sample(s->avctx->pix_fmt,
1898 &h_chroma_shift, &v_chroma_shift);
c65dfac4
KT
1899 for (i = 0; i < 3; i++) {
1900 memcpy(s->visualization_buffer[i], pict->data[i],
1901 (i == 0) ? pict->linesize[i] * height:
1902 pict->linesize[i] * height >> v_chroma_shift);
1903 pict->data[i] = s->visualization_buffer[i];
0c9bbaec 1904 }
c65dfac4
KT
1905 pict->type = FF_BUFFER_TYPE_COPY;
1906 ptr = pict->data[0];
1907 block_height = 16 >> v_chroma_shift;
db6e7795 1908
c65dfac4 1909 for (mb_y = 0; mb_y < s->mb_height; mb_y++) {
db6e7795 1910 int mb_x;
c65dfac4
KT
1911 for (mb_x = 0; mb_x < s->mb_width; mb_x++) {
1912 const int mb_index = mb_x + mb_y * s->mb_stride;
1913 if ((s->avctx->debug_mv) && pict->motion_val) {
1914 int type;
1915 for (type = 0; type < 3; type++) {
1916 int direction = 0;
1917 switch (type) {
1918 case 0:
1919 if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_P_FOR)) ||
 1920 (pict->pict_type != AV_PICTURE_TYPE_P))
0c9bbaec 1921 continue;
c65dfac4
KT
1922 direction = 0;
1923 break;
1924 case 1:
1925 if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_FOR)) ||
 1926 (pict->pict_type != AV_PICTURE_TYPE_B))
0c9bbaec 1927 continue;
c65dfac4
KT
1928 direction = 0;
1929 break;
1930 case 2:
1931 if ((!(s->avctx->debug_mv & FF_DEBUG_VIS_MV_B_BACK)) ||
 1932 (pict->pict_type != AV_PICTURE_TYPE_B))
0c9bbaec 1933 continue;
c65dfac4
KT
1934 direction = 1;
1935 break;
1936 }
1937 if (!USES_LIST(pict->mb_type[mb_index], direction))
1938 continue;
1939
1940 if (IS_8X8(pict->mb_type[mb_index])) {
1941 int i;
1942 for (i = 0; i < 4; i++) {
1943 int sx = mb_x * 16 + 4 + 8 * (i & 1);
1944 int sy = mb_y * 16 + 4 + 8 * (i >> 1);
1945 int xy = (mb_x * 2 + (i & 1) +
1946 (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
1947 int mx = (pict->motion_val[direction][xy][0] >> shift) + sx;
1948 int my = (pict->motion_val[direction][xy][1] >> shift) + sy;
1949 draw_arrow(ptr, sx, sy, mx, my, width,
1950 height, s->linesize, 100);
1951 }
1952 } else if (IS_16X8(pict->mb_type[mb_index])) {
1953 int i;
1954 for (i = 0; i < 2; i++) {
1955 int sx = mb_x * 16 + 8;
1956 int sy = mb_y * 16 + 4 + 8 * i;
1957 int xy = (mb_x * 2 + (mb_y * 2 + i) * mv_stride) << (mv_sample_log2 - 1);
1958 int mx = (pict->motion_val[direction][xy][0] >> shift);
1959 int my = (pict->motion_val[direction][xy][1] >> shift);
1960
1961 if (IS_INTERLACED(pict->mb_type[mb_index]))
1962 my *= 2;
1963
1964 draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1965 height, s->linesize, 100);
1966 }
1967 } else if (IS_8X16(pict->mb_type[mb_index])) {
1968 int i;
1969 for (i = 0; i < 2; i++) {
1970 int sx = mb_x * 16 + 4 + 8 * i;
1971 int sy = mb_y * 16 + 8;
1972 int xy = (mb_x * 2 + i + mb_y * 2 * mv_stride) << (mv_sample_log2 - 1);
1973 int mx = pict->motion_val[direction][xy][0] >> shift;
1974 int my = pict->motion_val[direction][xy][1] >> shift;
1975
1976 if (IS_INTERLACED(pict->mb_type[mb_index]))
1977 my *= 2;
1978
1979 draw_arrow(ptr, sx, sy, mx + sx, my + sy, width,
1980 height, s->linesize, 100);
1981 }
1982 } else {
1983 int sx = mb_x * 16 + 8;
1984 int sy = mb_y * 16 + 8;
1985 int xy = (mb_x + mb_y * mv_stride) << mv_sample_log2;
 1986 int mx = (pict->motion_val[direction][xy][0] >> shift) + sx;
 1987 int my = (pict->motion_val[direction][xy][1] >> shift) + sy;
1988 draw_arrow(ptr, sx, sy, mx, my, width, height, s->linesize, 100);
1989 }
9bc8b386 1990 }
864119b6 1991 }
c65dfac4
KT
1992 if ((s->avctx->debug & FF_DEBUG_VIS_QP) && pict->motion_val) {
1993 uint64_t c = (pict->qscale_table[mb_index] * 128 / 31) *
1994 0x0101010101010101ULL;
864119b6 1995 int y;
c65dfac4
KT
1996 for (y = 0; y < block_height; y++) {
1997 *(uint64_t *)(pict->data[1] + 8 * mb_x +
1998 (block_height * mb_y + y) *
1999 pict->linesize[1]) = c;
2000 *(uint64_t *)(pict->data[2] + 8 * mb_x +
2001 (block_height * mb_y + y) *
2002 pict->linesize[2]) = c;
864119b6
MN
2003 }
2004 }
c65dfac4
KT
2005 if ((s->avctx->debug & FF_DEBUG_VIS_MB_TYPE) &&
2006 pict->motion_val) {
2007 int mb_type = pict->mb_type[mb_index];
864119b6
MN
2008 uint64_t u,v;
2009 int y;
c65dfac4
KT
2010#define COLOR(theta, r) \
2011 u = (int)(128 + r * cos(theta * 3.141592 / 180)); \
2012 v = (int)(128 + r * sin(theta * 3.141592 / 180));
2013
2014
2015 u = v = 128;
2016 if (IS_PCM(mb_type)) {
2017 COLOR(120, 48)
2018 } else if ((IS_INTRA(mb_type) && IS_ACPRED(mb_type)) ||
2019 IS_INTRA16x16(mb_type)) {
2020 COLOR(30, 48)
2021 } else if (IS_INTRA4x4(mb_type)) {
2022 COLOR(90, 48)
2023 } else if (IS_DIRECT(mb_type) && IS_SKIP(mb_type)) {
2024 // COLOR(120, 48)
2025 } else if (IS_DIRECT(mb_type)) {
2026 COLOR(150, 48)
2027 } else if (IS_GMC(mb_type) && IS_SKIP(mb_type)) {
2028 COLOR(170, 48)
2029 } else if (IS_GMC(mb_type)) {
2030 COLOR(190, 48)
2031 } else if (IS_SKIP(mb_type)) {
2032 // COLOR(180, 48)
2033 } else if (!USES_LIST(mb_type, 1)) {
2034 COLOR(240, 48)
2035 } else if (!USES_LIST(mb_type, 0)) {
2036 COLOR(0, 48)
2037 } else {
864119b6
MN
2038 assert(USES_LIST(mb_type, 0) && USES_LIST(mb_type, 1));
2039 COLOR(300,48)
2040 }
2041
c65dfac4
KT
2042 u *= 0x0101010101010101ULL;
2043 v *= 0x0101010101010101ULL;
2044 for (y = 0; y < block_height; y++) {
2045 *(uint64_t *)(pict->data[1] + 8 * mb_x +
2046 (block_height * mb_y + y) * pict->linesize[1]) = u;
2047 *(uint64_t *)(pict->data[2] + 8 * mb_x +
2048 (block_height * mb_y + y) * pict->linesize[2]) = v;
864119b6
MN
2049 }
2050
c65dfac4
KT
2051 // segmentation
2052 if (IS_8X8(mb_type) || IS_16X8(mb_type)) {
2053 *(uint64_t *)(pict->data[0] + 16 * mb_x + 0 +
2054 (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
2055 *(uint64_t *)(pict->data[0] + 16 * mb_x + 8 +
2056 (16 * mb_y + 8) * pict->linesize[0]) ^= 0x8080808080808080ULL;
864119b6 2057 }
c65dfac4
KT
2058 if (IS_8X8(mb_type) || IS_8X16(mb_type)) {
2059 for (y = 0; y < 16; y++)
2060 pict->data[0][16 * mb_x + 8 + (16 * mb_y + y) *
2061 pict->linesize[0]] ^= 0x80;
864119b6 2062 }
c65dfac4
KT
2063 if (IS_8X8(mb_type) && mv_sample_log2 >= 2) {
2064 int dm = 1 << (mv_sample_log2 - 2);
2065 for (i = 0; i < 4; i++) {
2066 int sx = mb_x * 16 + 8 * (i & 1);
2067 int sy = mb_y * 16 + 8 * (i >> 1);
2068 int xy = (mb_x * 2 + (i & 1) +
2069 (mb_y * 2 + (i >> 1)) * mv_stride) << (mv_sample_log2 - 1);
2070 // FIXME bidir
2071 int32_t *mv = (int32_t *) &pict->motion_val[0][xy];
2072 if (mv[0] != mv[dm] ||
2073 mv[dm * mv_stride] != mv[dm * (mv_stride + 1)])
2074 for (y = 0; y < 8; y++)
2075 pict->data[0][sx + 4 + (sy + y) * pict->linesize[0]] ^= 0x80;
2076 if (mv[0] != mv[dm * mv_stride] || mv[dm] != mv[dm * (mv_stride + 1)])
2077 *(uint64_t *)(pict->data[0] + sx + (sy + 4) *
2078 pict->linesize[0]) ^= 0x8080808080808080ULL;
e21f3983
LM
2079 }
2080 }
115329f1 2081
c65dfac4 2082 if (IS_INTERLACED(mb_type) &&
36ef5369 2083 s->codec_id == AV_CODEC_ID_H264) {
864119b6
MN
2084 // hmm
2085 }
db6e7795 2086 }
c65dfac4 2087 s->mbskip_table[mb_index] = 0;
db6e7795
MN
2088 }
2089 }
2090 }
7bc9090a
MN
2091}
2092
6a9c8594
AS
2093/**
2094 * find the lowest MB row referenced in the MVs
2095 */
efd29844 2096int ff_MPV_lowest_referenced_row(MpegEncContext *s, int dir)
6a9c8594
AS
2097{
2098 int my_max = INT_MIN, my_min = INT_MAX, qpel_shift = !s->quarter_sample;
2099 int my, off, i, mvs;
2100
6bcdfe48
MN
2101 if (s->picture_structure != PICT_FRAME || s->mcsel)
2102 goto unhandled;
6a9c8594
AS
2103
2104 switch (s->mv_type) {
2105 case MV_TYPE_16X16:
2106 mvs = 1;
2107 break;
2108 case MV_TYPE_16X8:
2109 mvs = 2;
2110 break;
2111 case MV_TYPE_8X8:
2112 mvs = 4;
2113 break;
2114 default:
2115 goto unhandled;
2116 }
2117
2118 for (i = 0; i < mvs; i++) {
2119 my = s->mv[dir][i][1]<<qpel_shift;
2120 my_max = FFMAX(my_max, my);
2121 my_min = FFMIN(my_min, my);
2122 }
2123
2124 off = (FFMAX(-my_min, my_max) + 63) >> 6;
2125
2126 return FFMIN(FFMAX(s->mb_y + off, 0), s->mb_height-1);
2127unhandled:
2128 return s->mb_height-1;
2129}
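/* Worked example (editor's illustration, arbitrary numbers): with
 * s->quarter_sample set, qpel_shift is 0 and the vectors are already in
 * quarter-pel units.  A single 16x16 vector of (0, +70) gives my_max = 70,
 * so off = (70 + 63) >> 6 = 2: since 64 quarter-pel units span one 16-line
 * macroblock row, the prediction reaches two MB rows below the current one
 * and the caller has to wait until row s->mb_y + 2 of the reference frame
 * is available (clamped to the last row of the frame). */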
2130
3ada94ba
BF
2131/* put block[] to dest[] */
2132static inline void put_dct(MpegEncContext *s,
88bd7fdc 2133 int16_t *block, int i, uint8_t *dest, int line_size, int qscale)
178fcca8 2134{
3ada94ba
BF
2135 s->dct_unquantize_intra(s, block, i, qscale);
2136 s->dsp.idct_put (dest, line_size, block);
2137}
da9c9637 2138
3ada94ba
BF
2139/* add block[] to dest[] */
2140static inline void add_dct(MpegEncContext *s,
88bd7fdc 2141 int16_t *block, int i, uint8_t *dest, int line_size)
3ada94ba
BF
2142{
2143 if (s->block_last_index[i] >= 0) {
2144 s->dsp.idct_add (dest, line_size, block);
2145 }
2146}
2417652e 2147
115329f1 2148static inline void add_dequant_dct(MpegEncContext *s,
88bd7fdc 2149 int16_t *block, int i, uint8_t *dest, int line_size, int qscale)
0f440e02 2150{
de6d9b64 2151 if (s->block_last_index[i] >= 0) {
d50635cd 2152 s->dct_unquantize_inter(s, block, i, qscale);
9dbcbd92 2153
b0368839 2154 s->dsp.idct_add (dest, line_size, block);
de6d9b64
FB
2155 }
2156}
2157
7f2fe444 2158/**
58c42af7 2159 * Clean dc, ac, coded_block for the current non-intra MB.
7f2fe444
MN
2160 */
2161void ff_clean_intra_table_entries(MpegEncContext *s)
2162{
137c8468 2163 int wrap = s->b8_stride;
7f2fe444 2164 int xy = s->block_index[0];
115329f1
DB
2165
2166 s->dc_val[0][xy ] =
2167 s->dc_val[0][xy + 1 ] =
7f2fe444
MN
2168 s->dc_val[0][xy + wrap] =
2169 s->dc_val[0][xy + 1 + wrap] = 1024;
2170 /* ac pred */
0c1a9eda
ZK
2171 memset(s->ac_val[0][xy ], 0, 32 * sizeof(int16_t));
2172 memset(s->ac_val[0][xy + wrap], 0, 32 * sizeof(int16_t));
7f2fe444
MN
2173 if (s->msmpeg4_version>=3) {
2174 s->coded_block[xy ] =
2175 s->coded_block[xy + 1 ] =
2176 s->coded_block[xy + wrap] =
2177 s->coded_block[xy + 1 + wrap] = 0;
2178 }
2179 /* chroma */
137c8468
MN
2180 wrap = s->mb_stride;
2181 xy = s->mb_x + s->mb_y * wrap;
7f2fe444
MN
2182 s->dc_val[1][xy] =
2183 s->dc_val[2][xy] = 1024;
2184 /* ac pred */
0c1a9eda
ZK
2185 memset(s->ac_val[1][xy], 0, 16 * sizeof(int16_t));
2186 memset(s->ac_val[2][xy], 0, 16 * sizeof(int16_t));
115329f1 2187
137c8468 2188 s->mbintra_table[xy]= 0;
7f2fe444
MN
2189}
2190
de6d9b64
FB
2191/* generic function called after a macroblock has been parsed by the
2192 decoder or after it has been encoded by the encoder.
2193
2194 Important variables used:
2195 s->mb_intra : true if intra macroblock
2196 s->mv_dir : motion vector direction
2197 s->mv_type : motion vector type
2198 s->mv : motion vector
2199 s->interlaced_dct : true if interlaced dct used (mpeg2)
2200 */
54816a3e 2201static av_always_inline
88bd7fdc 2202void MPV_decode_mb_internal(MpegEncContext *s, int16_t block[12][64],
2bcbd984 2203 int is_mpeg12)
de6d9b64 2204{
7bc9090a 2205 const int mb_xy = s->mb_y * s->mb_stride + s->mb_x;
83344066 2206 if(CONFIG_MPEG_XVMC_DECODER && s->avctx->xvmc_acceleration){
78f9a878 2207 ff_xvmc_decode_mb(s);//xvmc uses pblocks
2e7b4c84
IK
2208 return;
2209 }
de6d9b64 2210
8289c6fa
WH
2211 if(s->avctx->debug&FF_DEBUG_DCT_COEFF) {
2212 /* save DCT coefficients */
2213 int i,j;
88bd7fdc 2214 int16_t *dct = &s->current_picture.f.dct_coeff[mb_xy * 64 * 6];
c4fb3b03
MN
2215 av_log(s->avctx, AV_LOG_DEBUG, "DCT coeffs of MB at %dx%d:\n", s->mb_x, s->mb_y);
2216 for(i=0; i<6; i++){
2217 for(j=0; j<64; j++){
8289c6fa 2218 *dct++ = block[i][s->dsp.idct_permutation[j]];
c4fb3b03
MN
2219 av_log(s->avctx, AV_LOG_DEBUG, "%5d", dct[-1]);
2220 }
2221 av_log(s->avctx, AV_LOG_DEBUG, "\n");
2222 }
8289c6fa
WH
2223 }
2224
657ccb5a 2225 s->current_picture.f.qscale_table[mb_xy] = s->qscale;
79e7b305 2226
de6d9b64
FB
2227 /* update DC predictors for P macroblocks */
2228 if (!s->mb_intra) {
bd7c626a 2229 if (!is_mpeg12 && (s->h263_pred || s->h263_aic)) {
0f440e02 2230 if(s->mbintra_table[mb_xy])
7f2fe444 2231 ff_clean_intra_table_entries(s);
de6d9b64 2232 } else {
7f2fe444
MN
2233 s->last_dc[0] =
2234 s->last_dc[1] =
de6d9b64
FB
2235 s->last_dc[2] = 128 << s->intra_dc_precision;
2236 }
2237 }
bd7c626a 2238 else if (!is_mpeg12 && (s->h263_pred || s->h263_aic))
0f440e02 2239 s->mbintra_table[mb_xy]=1;
bff6ecaa 2240
975a1447 2241 if ((s->flags&CODEC_FLAG_PSNR) || !(s->encoding && (s->intra_only || s->pict_type==AV_PICTURE_TYPE_B) && s->avctx->mb_decision != FF_MB_DECISION_RD)) { //FIXME precalc
0c1a9eda 2242 uint8_t *dest_y, *dest_cb, *dest_cr;
0f440e02 2243 int dct_linesize, dct_offset;
b3184779
MN
2244 op_pixels_func (*op_pix)[4];
2245 qpel_mc_func (*op_qpix)[16];
657ccb5a
DB
2246 const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2247 const int uvlinesize = s->current_picture.f.linesize[1];
2bcbd984
MR
2248 const int readable= s->pict_type != AV_PICTURE_TYPE_B || s->encoding || s->avctx->draw_horiz_band;
2249 const int block_size = 8;
3bb4e23a 2250
1e491e29 2251 /* avoid copy if macroblock skipped in last frame too */
1e491e29
MN
2252 /* skip only during decoding as we might trash the buffers during encoding a bit */
2253 if(!s->encoding){
0c1a9eda 2254 uint8_t *mbskip_ptr = &s->mbskip_table[mb_xy];
1e491e29 2255
160d679c
MM
2256 if (s->mb_skipped) {
2257 s->mb_skipped= 0;
975a1447 2258 assert(s->pict_type!=AV_PICTURE_TYPE_I);
8400b126 2259 *mbskip_ptr = 1;
657ccb5a 2260 } else if(!s->current_picture.f.reference) {
8400b126 2261 *mbskip_ptr = 1;
f943e138 2262 } else{
3bb4e23a
FB
2263 *mbskip_ptr = 0; /* not skipped */
2264 }
3994623d 2265 }
115329f1 2266
ffdff4d7 2267 dct_linesize = linesize << s->interlaced_dct;
3dc99a18 2268 dct_offset = s->interlaced_dct ? linesize : linesize * block_size;
115329f1 2269
b68ab260
MN
2270 if(readable){
2271 dest_y= s->dest[0];
2272 dest_cb= s->dest[1];
2273 dest_cr= s->dest[2];
2274 }else{
9c3d33d6 2275 dest_y = s->b_scratchpad;
ae35f5e1 2276 dest_cb= s->b_scratchpad+16*linesize;
ffdff4d7 2277 dest_cr= s->b_scratchpad+32*linesize;
b68ab260 2278 }
178fcca8 2279
de6d9b64
FB
2280 if (!s->mb_intra) {
2281 /* motion handling */
dfb706da 2282 /* decoding or more than one mb_type (MC was already done otherwise) */
7d1c3fc1 2283 if(!s->encoding){
6a9c8594 2284
27237d52 2285 if(HAVE_THREADS && s->avctx->active_thread_type&FF_THREAD_FRAME) {
6a9c8594 2286 if (s->mv_dir & MV_DIR_FORWARD) {
47c0ac96
DB
2287 ff_thread_await_progress(&s->last_picture_ptr->f,
2288 ff_MPV_lowest_referenced_row(s, 0),
2289 0);
6a9c8594
AS
2290 }
2291 if (s->mv_dir & MV_DIR_BACKWARD) {
47c0ac96
DB
2292 ff_thread_await_progress(&s->next_picture_ptr->f,
2293 ff_MPV_lowest_referenced_row(s, 1),
2294 0);
6a9c8594
AS
2295 }
2296 }
2297
2bcbd984
MR
2298 op_qpix= s->me.qpel_put;
2299 if ((!s->no_rounding) || s->pict_type==AV_PICTURE_TYPE_B){
2300 op_pix = s->dsp.put_pixels_tab;
178fcca8 2301 }else{
2bcbd984
MR
2302 op_pix = s->dsp.put_no_rnd_pixels_tab;
2303 }
2304 if (s->mv_dir & MV_DIR_FORWARD) {
7a851153 2305 ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 0, s->last_picture.f.data, op_pix, op_qpix);
2bcbd984
MR
2306 op_pix = s->dsp.avg_pixels_tab;
2307 op_qpix= s->me.qpel_avg;
2308 }
2309 if (s->mv_dir & MV_DIR_BACKWARD) {
7a851153 2310 ff_MPV_motion(s, dest_y, dest_cb, dest_cr, 1, s->next_picture.f.data, op_pix, op_qpix);
9dbcbd92 2311 }
de6d9b64
FB
2312 }
2313
0f440e02 2314 /* skip dequant / idct if we are really late ;) */
8c3eba7c 2315 if(s->avctx->skip_idct){
975a1447
SS
2316 if( (s->avctx->skip_idct >= AVDISCARD_NONREF && s->pict_type == AV_PICTURE_TYPE_B)
2317 ||(s->avctx->skip_idct >= AVDISCARD_NONKEY && s->pict_type != AV_PICTURE_TYPE_I)
8c3eba7c
MN
2318 || s->avctx->skip_idct >= AVDISCARD_ALL)
2319 goto skip_idct;
2320 }
0f440e02 2321
de6d9b64 2322 /* add dct residue */
36ef5369
AK
2323 if(s->encoding || !( s->msmpeg4_version || s->codec_id==AV_CODEC_ID_MPEG1VIDEO || s->codec_id==AV_CODEC_ID_MPEG2VIDEO
2324 || (s->codec_id==AV_CODEC_ID_MPEG4 && !s->mpeg_quant))){
178fcca8
MN
2325 add_dequant_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2326 add_dequant_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2327 add_dequant_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2328 add_dequant_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
0f440e02 2329
49fb20cb 2330 if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2d974017
BC
2331 if (s->chroma_y_shift){
2332 add_dequant_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2333 add_dequant_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2334 }else{
2335 dct_linesize >>= 1;
2336 dct_offset >>=1;
2337 add_dequant_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2338 add_dequant_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2339 add_dequant_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
2340 add_dequant_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
2341 }
b50eef3a 2342 }
36ef5369 2343 } else if(is_mpeg12 || (s->codec_id != AV_CODEC_ID_WMV2)){
178fcca8
MN
2344 add_dct(s, block[0], 0, dest_y , dct_linesize);
2345 add_dct(s, block[1], 1, dest_y + block_size, dct_linesize);
2346 add_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize);
2347 add_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize);
de6d9b64 2348
49fb20cb 2349 if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
ffdff4d7
IK
2350 if(s->chroma_y_shift){//Chroma420
2351 add_dct(s, block[4], 4, dest_cb, uvlinesize);
2352 add_dct(s, block[5], 5, dest_cr, uvlinesize);
2353 }else{
2354 //chroma422
2355 dct_linesize = uvlinesize << s->interlaced_dct;
3dc99a18 2356 dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
ffdff4d7
IK
2357
2358 add_dct(s, block[4], 4, dest_cb, dct_linesize);
2359 add_dct(s, block[5], 5, dest_cr, dct_linesize);
2360 add_dct(s, block[6], 6, dest_cb+dct_offset, dct_linesize);
2361 add_dct(s, block[7], 7, dest_cr+dct_offset, dct_linesize);
2362 if(!s->chroma_x_shift){//Chroma444
2363 add_dct(s, block[8], 8, dest_cb+8, dct_linesize);
2364 add_dct(s, block[9], 9, dest_cr+8, dct_linesize);
2365 add_dct(s, block[10], 10, dest_cb+8+dct_offset, dct_linesize);
2366 add_dct(s, block[11], 11, dest_cr+8+dct_offset, dct_linesize);
2367 }
2368 }
2369 }//fi gray
2370 }
d702a2e6 2371 else if (CONFIG_WMV2_DECODER || CONFIG_WMV2_ENCODER) {
1457ab52 2372 ff_wmv2_add_mb(s, block, dest_y, dest_cb, dest_cr);
0f440e02 2373 }
de6d9b64
FB
2374 } else {
2375 /* dct only in intra block */
36ef5369 2376 if(s->encoding || !(s->codec_id==AV_CODEC_ID_MPEG1VIDEO || s->codec_id==AV_CODEC_ID_MPEG2VIDEO)){
178fcca8
MN
2377 put_dct(s, block[0], 0, dest_y , dct_linesize, s->qscale);
2378 put_dct(s, block[1], 1, dest_y + block_size, dct_linesize, s->qscale);
2379 put_dct(s, block[2], 2, dest_y + dct_offset , dct_linesize, s->qscale);
2380 put_dct(s, block[3], 3, dest_y + dct_offset + block_size, dct_linesize, s->qscale);
a0201736 2381
49fb20cb 2382 if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
2d974017
BC
2383 if(s->chroma_y_shift){
2384 put_dct(s, block[4], 4, dest_cb, uvlinesize, s->chroma_qscale);
2385 put_dct(s, block[5], 5, dest_cr, uvlinesize, s->chroma_qscale);
2386 }else{
2387 dct_offset >>=1;
2388 dct_linesize >>=1;
2389 put_dct(s, block[4], 4, dest_cb, dct_linesize, s->chroma_qscale);
2390 put_dct(s, block[5], 5, dest_cr, dct_linesize, s->chroma_qscale);
2391 put_dct(s, block[6], 6, dest_cb + dct_offset, dct_linesize, s->chroma_qscale);
3ada94ba 2392 put_dct(s, block[7], 7, dest_cr + dct_offset, dct_linesize, s->chroma_qscale);
477ab036
MN
2393 }
2394 }
2395 }else{
3ada94ba
BF
2396 s->dsp.idct_put(dest_y , dct_linesize, block[0]);
2397 s->dsp.idct_put(dest_y + block_size, dct_linesize, block[1]);
2398 s->dsp.idct_put(dest_y + dct_offset , dct_linesize, block[2]);
2399 s->dsp.idct_put(dest_y + dct_offset + block_size, dct_linesize, block[3]);
fbb89806 2400
49fb20cb 2401 if(!CONFIG_GRAY || !(s->flags&CODEC_FLAG_GRAY)){
3ada94ba
BF
2402 if(s->chroma_y_shift){
2403 s->dsp.idct_put(dest_cb, uvlinesize, block[4]);
2404 s->dsp.idct_put(dest_cr, uvlinesize, block[5]);
2405 }else{
fbb89806 2406
3ada94ba 2407 dct_linesize = uvlinesize << s->interlaced_dct;
3dc99a18 2408 dct_offset = s->interlaced_dct ? uvlinesize : uvlinesize * 8;
9981dfc6 2409
3ada94ba
BF
2410 s->dsp.idct_put(dest_cb, dct_linesize, block[4]);
2411 s->dsp.idct_put(dest_cr, dct_linesize, block[5]);
2412 s->dsp.idct_put(dest_cb + dct_offset, dct_linesize, block[6]);
2413 s->dsp.idct_put(dest_cr + dct_offset, dct_linesize, block[7]);
2414 if(!s->chroma_x_shift){//Chroma444
2415 s->dsp.idct_put(dest_cb + 8, dct_linesize, block[8]);
2416 s->dsp.idct_put(dest_cr + 8, dct_linesize, block[9]);
2417 s->dsp.idct_put(dest_cb + 8 + dct_offset, dct_linesize, block[10]);
2418 s->dsp.idct_put(dest_cr + 8 + dct_offset, dct_linesize, block[11]);
2419 }
2420 }
2421 }//gray
9981dfc6 2422 }
477ab036 2423 }
3ada94ba
BF
2424skip_idct:
2425 if(!readable){
2426 s->dsp.put_pixels_tab[0][0](s->dest[0], dest_y , linesize,16);
2427 s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[1], dest_cb, uvlinesize,16 >> s->chroma_y_shift);
2428 s->dsp.put_pixels_tab[s->chroma_x_shift][0](s->dest[2], dest_cr, uvlinesize,16 >> s->chroma_y_shift);
34f60ee6 2429 }
477ab036 2430 }
477ab036
MN
2431}
2432
88bd7fdc 2433void ff_MPV_decode_mb(MpegEncContext *s, int16_t block[12][64]){
b250f9c6 2434#if !CONFIG_SMALL
bd7c626a 2435 if(s->out_format == FMT_MPEG1) {
2bcbd984 2436 MPV_decode_mb_internal(s, block, 1);
bd7c626a
KC
2437 } else
2438#endif
2bcbd984 2439 MPV_decode_mb_internal(s, block, 0);
77ea0d4b
MN
2440}
2441
3ada94ba 2442/**
3ada94ba
BF
 2443 * @param h the normal height; it will be reduced automatically if needed for the last row
2444 */
1d0feb5d
AK
2445void ff_draw_horiz_band(AVCodecContext *avctx, DSPContext *dsp, Picture *cur,
2446 Picture *last, int y, int h, int picture_structure,
2447 int first_field, int draw_edges, int low_delay,
2448 int v_edge_pos, int h_edge_pos)
2449{
2450 const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
2451 int hshift = desc->log2_chroma_w;
2452 int vshift = desc->log2_chroma_h;
2453 const int field_pic = picture_structure != PICT_FRAME;
6a9c8594
AS
2454 if(field_pic){
2455 h <<= 1;
2456 y <<= 1;
2457 }
2458
1d0feb5d
AK
2459 if (!avctx->hwaccel &&
2460 !(avctx->codec->capabilities & CODEC_CAP_HWACCEL_VDPAU) &&
2461 draw_edges &&
2462 cur->f.reference &&
2463 !(avctx->flags & CODEC_FLAG_EMU_EDGE)) {
2464 int *linesize = cur->f.linesize;
6a9c8594
AS
2465 int sides = 0, edge_h;
2466 if (y==0) sides |= EDGE_TOP;
1d0feb5d
AK
2467 if (y + h >= v_edge_pos)
2468 sides |= EDGE_BOTTOM;
6a9c8594 2469
1d0feb5d 2470 edge_h= FFMIN(h, v_edge_pos - y);
6a9c8594 2471
1d0feb5d
AK
2472 dsp->draw_edges(cur->f.data[0] + y * linesize[0],
2473 linesize[0], h_edge_pos, edge_h,
2474 EDGE_WIDTH, EDGE_WIDTH, sides);
2475 dsp->draw_edges(cur->f.data[1] + (y >> vshift) * linesize[1],
2476 linesize[1], h_edge_pos >> hshift, edge_h >> vshift,
2477 EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift, sides);
2478 dsp->draw_edges(cur->f.data[2] + (y >> vshift) * linesize[2],
2479 linesize[2], h_edge_pos >> hshift, edge_h >> vshift,
2480 EDGE_WIDTH >> hshift, EDGE_WIDTH >> vshift, sides);
6a9c8594
AS
2481 }
2482
1d0feb5d 2483 h = FFMIN(h, avctx->height - y);
6a9c8594 2484
1d0feb5d 2485 if(field_pic && first_field && !(avctx->slice_flags&SLICE_FLAG_ALLOW_FIELD)) return;
6a9c8594 2486
1d0feb5d 2487 if (avctx->draw_horiz_band) {
3ada94ba 2488 AVFrame *src;
560f773c
JR
2489 int offset[AV_NUM_DATA_POINTERS];
2490 int i;
77ea0d4b 2491
1d0feb5d
AK
2492 if(cur->f.pict_type == AV_PICTURE_TYPE_B || low_delay ||
2493 (avctx->slice_flags & SLICE_FLAG_CODED_ORDER))
2494 src = &cur->f;
2495 else if (last)
2496 src = &last->f;
3ada94ba
BF
2497 else
2498 return;
115329f1 2499
1d0feb5d
AK
2500 if (cur->f.pict_type == AV_PICTURE_TYPE_B &&
2501 picture_structure == PICT_FRAME &&
2502 avctx->codec_id != AV_CODEC_ID_H264 &&
2503 avctx->codec_id != AV_CODEC_ID_SVQ3) {
560f773c
JR
2504 for (i = 0; i < AV_NUM_DATA_POINTERS; i++)
2505 offset[i] = 0;
77ea0d4b 2506 }else{
1d0feb5d 2507 offset[0]= y * src->linesize[0];
3ada94ba 2508 offset[1]=
1d0feb5d 2509 offset[2]= (y >> vshift) * src->linesize[1];
560f773c
JR
2510 for (i = 3; i < AV_NUM_DATA_POINTERS; i++)
2511 offset[i] = 0;
77ea0d4b 2512 }
115329f1 2513
3ada94ba 2514 emms_c();
77ea0d4b 2515
1d0feb5d
AK
2516 avctx->draw_horiz_band(avctx, src, offset,
2517 y, picture_structure, h);
3ada94ba
BF
2518 }
2519}
115329f1 2520
1d0feb5d
AK
2521void ff_mpeg_draw_horiz_band(MpegEncContext *s, int y, int h)
2522{
2523 int draw_edges = s->unrestricted_mv && !s->intra_only;
2524 ff_draw_horiz_band(s->avctx, &s->dsp, &s->current_picture,
2525 &s->last_picture, y, h, s->picture_structure,
2526 s->first_field, draw_edges, s->low_delay,
2527 s->v_edge_pos, s->h_edge_pos);
2528}
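/* Editor's sketch (illustrative): a slice decoder that has just finished
 * macroblock row s->mb_y would typically emit it with
 *
 *     ff_mpeg_draw_horiz_band(s, s->mb_y * 16, 16);
 *
 * y and h being luma rows; ff_draw_horiz_band() clips h for the last row. */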
2529
3ada94ba 2530void ff_init_block_index(MpegEncContext *s){ //FIXME maybe rename
657ccb5a
DB
2531 const int linesize = s->current_picture.f.linesize[0]; //not s->linesize as this would be wrong for field pics
2532 const int uvlinesize = s->current_picture.f.linesize[1];
2bcbd984 2533 const int mb_size= 4;
77ea0d4b 2534
3ada94ba
BF
2535 s->block_index[0]= s->b8_stride*(s->mb_y*2 ) - 2 + s->mb_x*2;
2536 s->block_index[1]= s->b8_stride*(s->mb_y*2 ) - 1 + s->mb_x*2;
2537 s->block_index[2]= s->b8_stride*(s->mb_y*2 + 1) - 2 + s->mb_x*2;
2538 s->block_index[3]= s->b8_stride*(s->mb_y*2 + 1) - 1 + s->mb_x*2;
2539 s->block_index[4]= s->mb_stride*(s->mb_y + 1) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2540 s->block_index[5]= s->mb_stride*(s->mb_y + s->mb_height + 2) + s->b8_stride*s->mb_height*2 + s->mb_x - 1;
2541 //block_index is not used by mpeg2, so it is not affected by chroma_format
115329f1 2542
657ccb5a
DB
2543 s->dest[0] = s->current_picture.f.data[0] + ((s->mb_x - 1) << mb_size);
2544 s->dest[1] = s->current_picture.f.data[1] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift));
2545 s->dest[2] = s->current_picture.f.data[2] + ((s->mb_x - 1) << (mb_size - s->chroma_x_shift));
115329f1 2546
975a1447 2547 if(!(s->pict_type==AV_PICTURE_TYPE_B && s->avctx->draw_horiz_band && s->picture_structure==PICT_FRAME))
3ada94ba 2548 {
078cdecf 2549 if(s->picture_structure==PICT_FRAME){
3ada94ba
BF
2550 s->dest[0] += s->mb_y * linesize << mb_size;
2551 s->dest[1] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
2552 s->dest[2] += s->mb_y * uvlinesize << (mb_size - s->chroma_y_shift);
078cdecf
MN
2553 }else{
2554 s->dest[0] += (s->mb_y>>1) * linesize << mb_size;
2555 s->dest[1] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2556 s->dest[2] += (s->mb_y>>1) * uvlinesize << (mb_size - s->chroma_y_shift);
2557 assert((s->mb_y&1) == (s->picture_structure == PICT_BOTTOM_FIELD));
2558 }
77ea0d4b 2559 }
77ea0d4b 2560}
115329f1 2561
0b016eb9
DB
2562/**
2563 * Permute an 8x8 block.
2564 * @param block the block which will be permuted according to the given permutation vector
2565 * @param permutation the permutation vector
 2566 * @param last the last non-zero coefficient in scantable order, used to speed the permutation up
 2567 * @param scantable the used scantable; it is only used to speed the permutation up, the block is not
 2568 * (inverse) permuted to scantable order!
2569 */
2570void ff_block_permute(int16_t *block, uint8_t *permutation, const uint8_t *scantable, int last)
2571{
2572 int i;
2573 int16_t temp[64];
2574
2575 if(last<=0) return;
2576 //if(permutation[1]==1) return; //FIXME it is ok but not clean and might fail for some permutations
2577
2578 for(i=0; i<=last; i++){
2579 const int j= scantable[i];
2580 temp[j]= block[j];
2581 block[j]=0;
2582 }
2583
2584 for(i=0; i<=last; i++){
2585 const int j= scantable[i];
2586 const int perm_j= permutation[j];
2587 block[perm_j]= temp[j];
2588 }
2589}
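/* Editor's sketch (illustrative; `last' stands for whatever last-nonzero
 * index the caller tracks): reordering one block of coefficients into the
 * IDCT's internal layout without rescanning the whole 8x8 block. */
#if 0
ff_block_permute(block, s->dsp.idct_permutation,
                 s->intra_scantable.scantable, last);
#endif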
2590
3ada94ba
BF
2591void ff_mpeg_flush(AVCodecContext *avctx){
2592 int i;
2593 MpegEncContext *s = avctx->priv_data;
77ea0d4b 2594
3ada94ba
BF
2595 if(s==NULL || s->picture==NULL)
2596 return;
77ea0d4b 2597
6a9c8594 2598 for(i=0; i<s->picture_count; i++){
657ccb5a
DB
2599 if (s->picture[i].f.data[0] &&
2600 (s->picture[i].f.type == FF_BUFFER_TYPE_INTERNAL ||
2601 s->picture[i].f.type == FF_BUFFER_TYPE_USER))
34e46c44 2602 free_frame_buffer(s, &s->picture[i]);
de6d9b64 2603 }
3ada94ba 2604 s->current_picture_ptr = s->last_picture_ptr = s->next_picture_ptr = NULL;
115329f1 2605
3ada94ba 2606 s->mb_x= s->mb_y= 0;
7801d21d 2607
3ada94ba
BF
2608 s->parse_context.state= -1;
2609 s->parse_context.frame_start_found= 0;
2610 s->parse_context.overread= 0;
2611 s->parse_context.overread_index= 0;
2612 s->parse_context.index= 0;
2613 s->parse_context.last_index= 0;
2614 s->bitstream_buffer_size=0;
2615 s->pp_time=0;
de6d9b64
FB
2616}
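/* Editor's note (sketch, not taken from this file): decoders built on top of
 * MpegEncContext typically install this as their AVCodec.flush callback, so
 * that seeking drops all reference pictures and parser state, e.g.
 *
 *     .flush = ff_mpeg_flush,
 *
 * in the codec definition. */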
2617
115329f1 2618static void dct_unquantize_mpeg1_intra_c(MpegEncContext *s,
88bd7fdc 2619 int16_t *block, int n, int qscale)
de6d9b64 2620{
badaf88e 2621 int i, level, nCoeffs;
0c1a9eda 2622 const uint16_t *quant_matrix;
de6d9b64 2623
2ad1516a 2624 nCoeffs= s->block_last_index[n];
115329f1
DB
2625
2626 if (n < 4)
d50635cd
MN
2627 block[0] = block[0] * s->y_dc_scale;
2628 else
2629 block[0] = block[0] * s->c_dc_scale;
2630 /* XXX: only mpeg1 */
2631 quant_matrix = s->intra_matrix;
2632 for(i=1;i<=nCoeffs;i++) {
2633 int j= s->intra_scantable.permutated[i];
2634 level = block[j];
2635 if (level) {
2636 if (level < 0) {
2637 level = -level;
2638 level = (int)(level * qscale * quant_matrix[j]) >> 3;
2639 level = (level - 1) | 1;
2640 level = -level;
2641 } else {
2642 level = (int)(level * qscale * quant_matrix[j]) >> 3;
2643 level = (level - 1) | 1;
de6d9b64 2644 }
d50635cd 2645 block[j] = level;
de6d9b64 2646 }
d50635cd
MN
2647 }
2648}
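/* Worked example (editor's illustration, arbitrary values): level = 2,
 * qscale = 4, quant_matrix[j] = 16 gives (2 * 4 * 16) >> 3 = 16, and the
 * (level - 1) | 1 step forces the result odd, yielding 15; negative levels
 * go through the mirrored branch and come out as -15.  The MPEG-2 variants
 * below drop this per-coefficient oddification; their inter/bitexact
 * versions instead fold the summed parity into block[63]. */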
2649
115329f1 2650static void dct_unquantize_mpeg1_inter_c(MpegEncContext *s,
88bd7fdc 2651 int16_t *block, int n, int qscale)
d50635cd
MN
2652{
2653 int i, level, nCoeffs;
2654 const uint16_t *quant_matrix;
2655
2656 nCoeffs= s->block_last_index[n];
115329f1 2657
d50635cd
MN
2658 quant_matrix = s->inter_matrix;
2659 for(i=0; i<=nCoeffs; i++) {
2660 int j= s->intra_scantable.permutated[i];
2661 level = block[j];
2662 if (level) {
2663 if (level < 0) {
2664 level = -level;
2665 level = (((level << 1) + 1) * qscale *
2666 ((int) (quant_matrix[j]))) >> 4;
2667 level = (level - 1) | 1;
2668 level = -level;
2669 } else {
2670 level = (((level << 1) + 1) * qscale *
2671 ((int) (quant_matrix[j]))) >> 4;
2672 level = (level - 1) | 1;
de6d9b64 2673 }
d50635cd 2674 block[j] = level;
de6d9b64
FB
2675 }
2676 }
2677}
21af69f7 2678
115329f1 2679static void dct_unquantize_mpeg2_intra_c(MpegEncContext *s,
88bd7fdc 2680 int16_t *block, int n, int qscale)
9dbf1ddd
MN
2681{
2682 int i, level, nCoeffs;
0c1a9eda 2683 const uint16_t *quant_matrix;
9dbf1ddd 2684
2ad1516a
MN
2685 if(s->alternate_scan) nCoeffs= 63;
2686 else nCoeffs= s->block_last_index[n];
115329f1
DB
2687
2688 if (n < 4)
d50635cd
MN
2689 block[0] = block[0] * s->y_dc_scale;
2690 else
2691 block[0] = block[0] * s->c_dc_scale;
2692 quant_matrix = s->intra_matrix;
2693 for(i=1;i<=nCoeffs;i++) {
2694 int j= s->intra_scantable.permutated[i];
2695 level = block[j];
2696 if (level) {
2697 if (level < 0) {
2698 level = -level;
2699 level = (int)(level * qscale * quant_matrix[j]) >> 3;
2700 level = -level;
2701 } else {
2702 level = (int)(level * qscale * quant_matrix[j]) >> 3;
2703 }
2704 block[j] = level;
2705 }
2706 }
2707}
2708
e27b6e62 2709static void dct_unquantize_mpeg2_intra_bitexact(MpegEncContext *s,
88bd7fdc 2710 int16_t *block, int n, int qscale)
e27b6e62
MN
2711{
2712 int i, level, nCoeffs;
2713 const uint16_t *quant_matrix;
2714 int sum=-1;
2715
2716 if(s->alternate_scan) nCoeffs= 63;
2717 else nCoeffs= s->block_last_index[n];
2718
2719 if (n < 4)
2720 block[0] = block[0] * s->y_dc_scale;
2721 else
2722 block[0] = block[0] * s->c_dc_scale;
2723 quant_matrix = s->intra_matrix;
2724 for(i=1;i<=nCoeffs;i++) {
2725 int j= s->intra_scantable.permutated[i];
2726 level = block[j];
2727 if (level) {
2728 if (level < 0) {
2729 level = -level;
2730 level = (int)(level * qscale * quant_matrix[j]) >> 3;
2731 level = -level;
2732 } else {
2733 level = (int)(level * qscale * quant_matrix[j]) >> 3;
2734 }
2735 block[j] = level;
2736 sum+=level;
2737 }
2738 }
2739 block[63]^=sum&1;
2740}
2741
115329f1 2742static void dct_unquantize_mpeg2_inter_c(MpegEncContext *s,
88bd7fdc 2743 int16_t *block, int n, int qscale)
d50635cd
MN
2744{
2745 int i, level, nCoeffs;
2746 const uint16_t *quant_matrix;
2747 int sum=-1;
2748
2749 if(s->alternate_scan) nCoeffs= 63;
2750 else nCoeffs= s->block_last_index[n];
115329f1 2751
d50635cd
MN
2752 quant_matrix = s->inter_matrix;
2753 for(i=0; i<=nCoeffs; i++) {
2754 int j= s->intra_scantable.permutated[i];
2755 level = block[j];
2756 if (level) {
2757 if (level < 0) {
2758 level = -level;
2759 level = (((level << 1) + 1) * qscale *
2760 ((int) (quant_matrix[j]))) >> 4;
2761 level = -level;
2762 } else {
2763 level = (((level << 1) + 1) * qscale *
2764 ((int) (quant_matrix[j]))) >> 4;
2765 }
2766 block[j] = level;
2767 sum+=level;
2768 }
2769 }
2770 block[63]^=sum&1;
2771}
2772
115329f1 2773static void dct_unquantize_h263_intra_c(MpegEncContext *s,
88bd7fdc 2774 int16_t *block, int n, int qscale)
d50635cd
MN
2775{
2776 int i, level, qmul, qadd;
2777 int nCoeffs;
115329f1 2778
d50635cd 2779 assert(s->block_last_index[n]>=0);
115329f1 2780
d50635cd 2781 qmul = qscale << 1;
115329f1 2782
d50635cd 2783 if (!s->h263_aic) {
115329f1 2784 if (n < 4)
9dbf1ddd
MN
2785 block[0] = block[0] * s->y_dc_scale;
2786 else
2787 block[0] = block[0] * s->c_dc_scale;
d50635cd
MN
2788 qadd = (qscale - 1) | 1;
2789 }else{
2790 qadd = 0;
2791 }
2792 if(s->ac_pred)
2793 nCoeffs=63;
2794 else
2795 nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
2796
2797 for(i=1; i<=nCoeffs; i++) {
2798 level = block[i];
2799 if (level) {
2800 if (level < 0) {
2801 level = level * qmul - qadd;
2802 } else {
2803 level = level * qmul + qadd;
9dbf1ddd 2804 }
d50635cd 2805 block[i] = level;
9dbf1ddd 2806 }
9dbf1ddd
MN
2807 }
2808}
2809
115329f1 2810static void dct_unquantize_h263_inter_c(MpegEncContext *s,
88bd7fdc 2811 int16_t *block, int n, int qscale)
21af69f7
FB
2812{
2813 int i, level, qmul, qadd;
badaf88e 2814 int nCoeffs;
115329f1 2815
2ad1516a 2816 assert(s->block_last_index[n]>=0);
115329f1 2817
2ad1516a
MN
2818 qadd = (qscale - 1) | 1;
2819 qmul = qscale << 1;
115329f1 2820
d50635cd 2821 nCoeffs= s->inter_scantable.raster_end[ s->block_last_index[n] ];
21af69f7 2822
d50635cd 2823 for(i=0; i<=nCoeffs; i++) {
21af69f7
FB
2824 level = block[i];
2825 if (level) {
2826 if (level < 0) {
2827 level = level * qmul - qadd;
2828 } else {
2829 level = level * qmul + qadd;
2830 }
21af69f7
FB
2831 block[i] = level;
2832 }
2833 }
2834}
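/* Worked example (editor's illustration): qscale = 5 gives qmul = 10 and
 * qadd = (5 - 1) | 1 = 5, so a stored level of 3 reconstructs to
 * 3 * 10 + 5 = 35 and -3 to -3 * 10 - 5 = -35.  This is the flat-matrix
 * equivalent of qscale * (2 * level + 1), minus one when qscale is even. */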
de6d9b64 2835
b776e3d1
AJ
2836/**
 2837 * Set qscale and update qscale-dependent variables.
2838 */
2839void ff_set_qscale(MpegEncContext * s, int qscale)
2840{
2841 if (qscale < 1)
2842 qscale = 1;
2843 else if (qscale > 31)
2844 qscale = 31;
2845
2846 s->qscale = qscale;
2847 s->chroma_qscale= s->chroma_qscale_table[qscale];
2848
2849 s->y_dc_scale= s->y_dc_scale_table[ qscale ];
2850 s->c_dc_scale= s->c_dc_scale_table[ s->chroma_qscale ];
2851}
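/* Editor's sketch (illustrative): a header/slice parser that has just read a
 * quantiser value would refresh every derived scale in one call, e.g.
 *
 *     ff_set_qscale(s, quant);  // clamps to 1..31, updates chroma_qscale
 *                               // and the luma/chroma DC scale factors
 *
 * where `quant' is whatever quantiser the bitstream carries. */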
6a9c8594 2852
efd29844 2853void ff_MPV_report_decode_progress(MpegEncContext *s)
6a9c8594 2854{
54974c62 2855 if (s->pict_type != AV_PICTURE_TYPE_B && !s->partitioned_frame && !s->er.error_occurred)
47c0ac96 2856 ff_thread_report_progress(&s->current_picture_ptr->f, s->mb_y, 0);
6a9c8594 2857}
54974c62
AK
2858
2859void ff_mpeg_er_frame_start(MpegEncContext *s)
2860{
2861 ERContext *er = &s->er;
2862
2863 er->cur_pic = s->current_picture_ptr;
2864 er->last_pic = s->last_picture_ptr;
2865 er->next_pic = s->next_picture_ptr;
2866
2867 er->pp_time = s->pp_time;
2868 er->pb_time = s->pb_time;
2869 er->quarter_sample = s->quarter_sample;
2870 er->partitioned_frame = s->partitioned_frame;
2871
2872 ff_er_frame_start(er);
2873}