/*
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
/**
 * @file
 * Microsoft Screen 2 (aka Windows Media Video V9 Screen) decoder
 */
#include "libavutil/avassert.h"

#include "msmpeg4data.h"
#include "vc1.h"
#include "mss12.h"
#include "mss2dsp.h"
typedef struct MSS2Context {
    VC1Context     v;
    int            split_position;
    AVFrame        pic;
    AVFrame        last_pic;
    MSS12Context   c;
    MSS2DSPContext dsp;
    SliceContext   sc[2];
} MSS2Context;
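/* The arith2 coder below keeps its state in 24-bit low/high/value registers.
 * Whenever the most significant bits of low and high have settled (or the
 * interval straddles the midpoint too tightly), arith2_normalise() shifts the
 * settled byte out and pulls the next bitstream byte into the value register. */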
static void arith2_normalise(ArithCoder *c)
{
    while ((c->high >> 15) - (c->low >> 15) < 2) {
        if ((c->low ^ c->high) & 0x10000) {
            c->high  ^= 0x8000;
            c->value ^= 0x8000;
            c->low   ^= 0x8000;
        }
        c->high  = c->high  << 8 & 0xFFFFFF | 0xFF;
        c->value = c->value << 8 & 0xFFFFFF | bytestream2_get_byte(c->gbc.gB);
        c->low   = c->low   << 8 & 0xFFFFFF;
    }
}
/* L. Stuiver and A. Moffat: "Piecewise Integer Mapping for Arithmetic Coding."
 * In Proc. 8th Data Compression Conference (DCC '98), pp. 3-12, Mar. 1998 */
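/* Illustrative example (numbers chosen here, not taken from the spec): with
 * range = 6 and n = 4, split = (4 << 1) - 6 = 2.  Values 0..1 map to
 * themselves while 2..5 collapse pairwise onto 2..3, so the 6 possible
 * offsets inside the interval cover the 4 symbol slots exactly;
 * arith2_rescale_interval() applies the matching expansion in reverse. */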
static int arith2_get_scaled_value(int value, int n, int range)
{
    int split = (n << 1) - range;

    if (value > split)
        return split + (value - split >> 1);
    else
        return value;
}
static void arith2_rescale_interval(ArithCoder *c, int range,
                                    int low, int high, int n)
{
    int split = (n << 1) - range;

    if (high > split)
        c->high = split + (high - split << 1);
    else
        c->high = high;

    c->high += c->low - 1;

    if (low > split)
        c->low += split + (low - split << 1);
    else
        c->low += low;
}
static int arith2_get_number(ArithCoder *c, int n)
{
    int range = c->high - c->low + 1;
    int scale = av_log2(range) - av_log2(n);
    int val;

    if (n << scale > range)
        scale--;

    n <<= scale;

    val = arith2_get_scaled_value(c->value - c->low, n, range) >> scale;

    arith2_rescale_interval(c, range, val << scale, (val + 1) << scale, n);

    arith2_normalise(c);

    return val;
}
static int arith2_get_prob(ArithCoder *c, int16_t *probs)
{
    int range = c->high - c->low + 1, n = *probs;
    int scale = av_log2(range) - av_log2(n);
    int i     = 0;
    int val;

    if (n << scale > range)
        scale--;

    n <<= scale;

    val = arith2_get_scaled_value(c->value - c->low, n, range) >> scale;
    while (probs[++i] > val) ;

    arith2_rescale_interval(c, range,
                            probs[i] << scale, probs[i - 1] << scale, n);

    return i;
}
ARITH_GET_MODEL_SYM(2)
static int arith2_get_consumed_bytes(ArithCoder *c)
{
    int diff = (c->high >> 16) - (c->low >> 16);
    int bp   = bytestream2_tell(c->gbc.gB) - 3 << 3;
    int bits = 1;

    while (!(diff & 0x80)) {
        bits++;
        diff <<= 1;
    }

    return (bits + bp + 7 >> 3) + ((c->low >> 16) + 1 == c->high >> 16);
}
static void arith2_init(ArithCoder *c, GetByteContext *gB)
{
    c->low           = 0;
    c->high          = 0xFFFFFF;
    c->value         = bytestream2_get_be24(gB);
    c->gbc.gB        = gB;
    c->get_model_sym = arith2_get_model_sym;
    c->get_number    = arith2_get_number;
}
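/* Palette update for keyframes: one byte gives the number of entries,
 * followed by ncol big-endian 3-byte RGB triplets.  Only the last
 * free_colours entries of the 256-colour palette may be replaced. */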
static int decode_pal_v2(MSS12Context *ctx, const uint8_t *buf, int buf_size)
{
    int i, ncol;
    uint32_t *pal = ctx->pal + 256 - ctx->free_colours;

    if (!ctx->free_colours)
        return 0;

    ncol = *buf++;
    if (ncol > ctx->free_colours || buf_size < 2 + ncol * 3)
        return AVERROR_INVALIDDATA;
    for (i = 0; i < ncol; i++)
        *pal++ = AV_RB24(buf + 3 * i);

    return 1 + ncol * 3;
}
static int decode_555(GetByteContext *gB, uint16_t *dst, int stride,
                      int keyframe, int w, int h)
{
    int last_symbol = 0, repeat = 0, prev_avail = 0;

    if (!keyframe) {
        int x, y, endx, endy, t;

#define READ_PAIR(a, b)                 \
    a  = bytestream2_get_byte(gB) << 4; \
    t  = bytestream2_get_byte(gB);      \
    a |= t >> 4;                        \
    b  = (t & 0xF) << 8;                \
    b |= bytestream2_get_byte(gB);
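        /* Non-keyframes carry a clip rectangle stored as packed 12-bit
         * coordinates: each READ_PAIR() below consumes three bytes and
         * yields two 12-bit values. */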
        READ_PAIR(x, endx)
        READ_PAIR(y, endy)

        if (endx >= w || endy >= h || x > endx || y > endy)
            return AVERROR_INVALIDDATA;
        dst += x + stride * y;
        w    = endx - x + 1;
        h    = endy - y + 1;
        if (y)
            prev_avail = 1;
    }

    do {
        uint16_t *p = dst;
        do {
            if (repeat-- < 1) {
                int b = bytestream2_get_byte(gB);
                if (b < 128)
                    last_symbol = b << 8 | bytestream2_get_byte(gB);
                else if (b > 129) {
                    repeat = 0;
                    while (b-- > 130)
                        repeat = (repeat << 8) + bytestream2_get_byte(gB) + 1;
                    if (last_symbol == -2) {
                        int skip = FFMIN((unsigned)repeat, dst + w - p);
                        repeat -= skip;
                        p      += skip;
                    }
                } else
                    last_symbol = 127 - b;
            }
            if (last_symbol >= 0)
                *p = last_symbol;
            else if (last_symbol == -1 && prev_avail)
                *p = *(p - stride);
        } while (++p < dst + w);
        dst        += stride;
        prev_avail  = 1;
    } while (--h);

    return 0;
}
static int decode_rle(GetBitContext *gb, uint8_t *pal_dst, int pal_stride,
                      uint8_t *rgb_dst, int rgb_stride, uint32_t *pal,
                      int keyframe, int kf_slipt, int slice, int w, int h)
{
    uint8_t bits[270] = { 0 };
    uint32_t codes[270];
    VLC vlc;

    int current_length = 0, read_codes = 0, next_code = 0, current_codes = 0;
    int remaining_codes, surplus_codes, i;

    const int alphabet_size = 270 - keyframe;

    int last_symbol = 0, repeat = 0, prev_avail = 0;

    if (!keyframe) {
        int x, y, clipw, cliph;

        x     = get_bits(gb, 12);
        y     = get_bits(gb, 12);
        clipw = get_bits(gb, 12) + 1;
        cliph = get_bits(gb, 12) + 1;

        if (x + clipw > w || y + cliph > h)
            return AVERROR_INVALIDDATA;
        pal_dst += pal_stride * y + x;
        rgb_dst += rgb_stride * y + x * 3;
        w        = clipw;
        h        = cliph;
        if (y)
            prev_avail = 1;
    } else {
        if (slice > 0) {
            pal_dst    += pal_stride * kf_slipt;
            rgb_dst    += rgb_stride * kf_slipt;
            prev_avail  = 1;
            h          -= kf_slipt;
        } else
            h = kf_slipt;
    }
    /* read explicit codes */
    do {
        while (current_codes--) {
            int symbol = get_bits(gb, 8);
            if (symbol >= 204 - keyframe)
                symbol += 14 - keyframe;
            else if (symbol > 189)
                symbol = get_bits1(gb) + (symbol << 1) - 190;
            if (bits[symbol])
                return AVERROR_INVALIDDATA;
            bits[symbol]  = current_length;
            codes[symbol] = next_code++;
            read_codes++;
        }
        current_length++;
        next_code     <<= 1;
        remaining_codes = (1 << current_length) - next_code;
        current_codes   = get_bits(gb, av_ceil_log2(remaining_codes + 1));
        if (current_length > 22 || current_codes > remaining_codes)
            return AVERROR_INVALIDDATA;
    } while (current_codes != remaining_codes);
    remaining_codes = alphabet_size - read_codes;
    /* determine the minimum length to fit the rest of the alphabet */
    while ((surplus_codes = (2 << current_length) -
                            (next_code << 1) - remaining_codes) < 0) {
        current_length++;
        next_code <<= 1;
    }
    /* add the rest of the symbols lexicographically */
    for (i = 0; i < alphabet_size; i++)
        if (!bits[i]) {
            if (surplus_codes-- == 0) {
                current_length++;
                next_code <<= 1;
            }
            bits[i]  = current_length;
            codes[i] = next_code++;
        }

    if (next_code != 1 << current_length)
        return AVERROR_INVALIDDATA;
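    /* The check above enforces that the canonical prefix code is complete:
     * the code words must exactly fill the space of current_length-bit
     * values.  For example, lengths {1, 2, 2} yield the codes 0, 10, 11 and
     * next_code ends up at 4 == 1 << 2; anything else means the code tree
     * has a hole or an overflow. */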
    if (i = init_vlc(&vlc, 9, alphabet_size, bits, 1, 1, codes, 4, 4, 0))
        return i;
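    /* Roughly, the symbol alphabet used by the loop below: values 0-255 are
     * literal palette indices (written to the palette plane and expanded
     * through pal[] into the RGB plane), the next few symbols select a run
     * length read with get_bits(), and the largest values map to
     * last_symbol == -1 ("copy the pixel above") and -2 ("skip"). */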
    do {
        uint8_t *pp = pal_dst;
        uint8_t *rp = rgb_dst;
        do {
            if (repeat-- < 1) {
                int b = get_vlc2(gb, vlc.table, 9, 3);
                if (b < 256)
                    last_symbol = b;
                else if (b < 268) {
                    b -= 256;
                    if (b == 11)
                        b = get_bits(gb, 4) + 10;

                    if (!b)
                        repeat = 0;
                    else
                        repeat = get_bits(gb, b);

                    repeat += (1 << b) - 1;

                    if (last_symbol == -2) {
                        int skip = FFMIN(repeat, pal_dst + w - pp);
                        repeat -= skip;
                        pp     += skip;
                        rp     += skip * 3;
                    }
                } else
                    last_symbol = 267 - b;
            }
            if (last_symbol >= 0) {
                *pp = last_symbol;
                AV_WB24(rp, pal[last_symbol]);
            } else if (last_symbol == -1 && prev_avail) {
                *pp = *(pp - pal_stride);
                memcpy(rp, rp - rgb_stride, 3);
            }
            rp += 3;
        } while (++pp < pal_dst + w);
        pal_dst    += pal_stride;
        rgb_dst    += rgb_stride;
        prev_avail  = 1;
    } while (--h);

    ff_free_vlc(&vlc);

    return 0;
}
static int decode_wmv9(AVCodecContext *avctx, const uint8_t *buf, int buf_size,
                       int x, int y, int w, int h, int wmv9_mask)
{
    MSS2Context *ctx  = avctx->priv_data;
    MSS12Context *c   = &ctx->c;
    VC1Context *v     = avctx->priv_data;
    MpegEncContext *s = &v->s;
    AVFrame *f;

    ff_mpeg_flush(avctx);

    if (s->current_picture_ptr == NULL || s->current_picture_ptr->f.data[0]) {
        int i = ff_find_unused_picture(s, 0);
        if (i < 0)
            return -1;
        s->current_picture_ptr = &s->picture[i];
    }

    init_get_bits(&s->gb, buf, buf_size * 8);

    s->loop_filter = avctx->skip_loop_filter < AVDISCARD_ALL;

    if (ff_vc1_parse_frame_header(v, &s->gb) == -1) {
        av_log(v->s.avctx, AV_LOG_ERROR, "header error\n");
        return AVERROR_INVALIDDATA;
    }

    if (s->pict_type != AV_PICTURE_TYPE_I) {
        av_log(v->s.avctx, AV_LOG_ERROR, "expected I-frame\n");
        return AVERROR_INVALIDDATA;
    }

    avctx->pix_fmt = AV_PIX_FMT_YUV420P;

    if (ff_MPV_frame_start(s, avctx) < 0) {
        av_log(v->s.avctx, AV_LOG_ERROR, "ff_MPV_frame_start error\n");
        avctx->pix_fmt = AV_PIX_FMT_RGB24;
        return -1;
    }

    ff_er_frame_start(s);

    v->bits = buf_size * 8;

    v->end_mb_x = (w + 15) >> 4;
    s->end_mb_y = (h + 15) >> 4;
    if (v->respic & 1)
        v->end_mb_x = v->end_mb_x + 1 >> 1;
    if (v->respic & 2)
        s->end_mb_y = s->end_mb_y + 1 >> 1;

    ff_vc1_decode_blocks(v);

    ff_er_frame_end(s);

    ff_MPV_frame_end(s);

    f = &s->current_picture.f;

    if (v->respic == 3) {
        ctx->dsp.upsample_plane(f->data[0], f->linesize[0], w,      h);
        ctx->dsp.upsample_plane(f->data[1], f->linesize[1], w >> 1, h >> 1);
        ctx->dsp.upsample_plane(f->data[2], f->linesize[2], w >> 1, h >> 1);
    } else if (v->respic)
        av_log_ask_for_sample(v->s.avctx,
                              "Asymmetric WMV9 rectangle subsampling\n");

    av_assert0(f->linesize[1] == f->linesize[2]);

    if (wmv9_mask != -1)
        ctx->dsp.mss2_blit_wmv9_masked(c->rgb_pic + y * c->rgb_stride + x * 3,
                                       c->rgb_stride, wmv9_mask,
                                       c->pal_pic + y * c->pal_stride + x,
                                       c->pal_stride,
                                       f->data[0], f->linesize[0],
                                       f->data[1], f->data[2], f->linesize[1],
                                       w, h);
    else
        ctx->dsp.mss2_blit_wmv9(c->rgb_pic + y * c->rgb_stride + x * 3,
                                c->rgb_stride,
                                f->data[0], f->linesize[0],
                                f->data[1], f->data[2], f->linesize[1],
                                w, h);

    avctx->pix_fmt = AV_PIX_FMT_RGB24;

    return 0;
}
typedef struct Rectangle {
    int coded, x, y, w, h;
} Rectangle;
#define MAX_WMV9_RECTANGLES 20
#define ARITH2_PADDING 2
static int mss2_decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
                             AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;
    MSS2Context *ctx   = avctx->priv_data;
    MSS12Context *c    = &ctx->c;
    GetBitContext gb;
    GetByteContext gB;
    ArithCoder acoder;

    int keyframe, has_wmv9, has_mv, is_rle, is_555, ret;

    Rectangle wmv9rects[MAX_WMV9_RECTANGLES], *r;
    int used_rects = 0, i, implicit_rect = 0, av_uninit(wmv9_mask);

    av_assert0(FF_INPUT_BUFFER_PADDING_SIZE >=
               ARITH2_PADDING + (MIN_CACHE_BITS + 7) / 8);
    init_get_bits(&gb, buf, buf_size * 8);

    if (keyframe = get_bits1(&gb))
        skip_bits(&gb, 7);
    has_wmv9 = get_bits1(&gb);
    has_mv   = keyframe ? 0 : get_bits1(&gb);
    is_rle   = get_bits1(&gb);
    is_555   = is_rle && get_bits1(&gb);
    if (c->slice_split > 0)
        ctx->split_position = c->slice_split;
    else if (c->slice_split < 0) {
        if (get_bits1(&gb)) {
            if (get_bits1(&gb)) {
                if (get_bits1(&gb))
                    ctx->split_position = get_bits(&gb, 16);
                else
                    ctx->split_position = get_bits(&gb, 12);
            } else
                ctx->split_position = get_bits(&gb, 8) << 4;
        } else {
            if (keyframe)
                ctx->split_position = avctx->height / 2;
        }
    } else
        ctx->split_position = avctx->height;

    if (c->slice_split && (ctx->split_position < 1 - is_555 ||
                           ctx->split_position > avctx->height - 1))
        return AVERROR_INVALIDDATA;
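    /* The slice split row is either fixed by the extradata (slice_split > 0),
     * absent (slice_split == 0), or signalled per frame with a variable-length
     * field: 16 or 12 explicit bits, or 8 bits scaled by 16, apparently
     * falling back to half the frame height on keyframes. */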
    buf      += get_bits_count(&gb) >> 3;
    buf_size -= get_bits_count(&gb) >> 3;

    if (buf_size < 1)
        return AVERROR_INVALIDDATA;

    if (is_555 && (has_wmv9 || has_mv || c->slice_split && ctx->split_position))
        return AVERROR_INVALIDDATA;

    avctx->pix_fmt = is_555 ? AV_PIX_FMT_RGB555 : AV_PIX_FMT_RGB24;
    if (ctx->pic.data[0] && ctx->pic.format != avctx->pix_fmt)
        avctx->release_buffer(avctx, &ctx->pic);
    if (has_wmv9) {
        bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
        arith2_init(&acoder, &gB);

        implicit_rect = !arith2_get_bit(&acoder);
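        /* Rectangle list: each rectangle is coded against the frame bounds,
         * and the x origin of every rectangle after the first is coded as a
         * delta from the previous rectangle's x (presumably to keep the
         * arith2_get_number() ranges small). */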
        while (arith2_get_bit(&acoder)) {
            if (used_rects == MAX_WMV9_RECTANGLES)
                return AVERROR_INVALIDDATA;
            r = &wmv9rects[used_rects];
            if (!used_rects)
                r->x = arith2_get_number(&acoder, avctx->width);
            else
                r->x = arith2_get_number(&acoder, avctx->width -
                                         wmv9rects[used_rects - 1].x) +
                       wmv9rects[used_rects - 1].x;
            r->y = arith2_get_number(&acoder, avctx->height);
            r->w = arith2_get_number(&acoder, avctx->width  - r->x) + 1;
            r->h = arith2_get_number(&acoder, avctx->height - r->y) + 1;
            used_rects++;
        }
        if (implicit_rect && used_rects) {
            av_log(avctx, AV_LOG_ERROR, "implicit_rect && used_rects > 0\n");
            return AVERROR_INVALIDDATA;
        }

        if (implicit_rect) {
            wmv9rects[0].x = 0;
            wmv9rects[0].y = 0;
            wmv9rects[0].w = avctx->width;
            wmv9rects[0].h = avctx->height;

            used_rects = 1;
        }
        for (i = 0; i < used_rects; i++) {
            if (!implicit_rect && arith2_get_bit(&acoder)) {
                av_log(avctx, AV_LOG_ERROR, "Unexpected grandchildren\n");
                return AVERROR_INVALIDDATA;
            }
            if (!i) {
                wmv9_mask = arith2_get_bit(&acoder) - 1;
                if (!wmv9_mask)
                    wmv9_mask = arith2_get_number(&acoder, 256);
            }
            wmv9rects[i].coded = arith2_get_number(&acoder, 2);
        }

        buf      += arith2_get_consumed_bytes(&acoder);
        buf_size -= arith2_get_consumed_bytes(&acoder);
        if (buf_size < 1)
            return AVERROR_INVALIDDATA;
    }
    c->mvX = c->mvY = 0;
    if (keyframe && !is_555) {
        if ((i = decode_pal_v2(c, buf, buf_size)) < 0)
            return AVERROR_INVALIDDATA;
        buf      += i;
        buf_size -= i;
    } else if (has_mv) {
        buf      += 4;
        buf_size -= 4;
        if (buf_size < 1)
            return AVERROR_INVALIDDATA;
        c->mvX = AV_RB16(buf - 4) - avctx->width;
        c->mvY = AV_RB16(buf - 2) - avctx->height;
    }
    if (c->mvX < 0 || c->mvY < 0) {
        FFSWAP(AVFrame, ctx->pic, ctx->last_pic);
        FFSWAP(uint8_t *, c->pal_pic, c->last_pal_pic);

        if (ctx->pic.data[0])
            avctx->release_buffer(avctx, &ctx->pic);

        ctx->pic.reference    = 3;
        ctx->pic.buffer_hints = FF_BUFFER_HINTS_VALID    |
                                FF_BUFFER_HINTS_READABLE |
                                FF_BUFFER_HINTS_PRESERVE |
                                FF_BUFFER_HINTS_REUSABLE;

        if ((ret = ff_get_buffer(avctx, &ctx->pic)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "get_buffer() failed\n");
            return ret;
        }

        if (ctx->last_pic.data[0]) {
            av_assert0(ctx->pic.linesize[0] == ctx->last_pic.linesize[0]);
            c->last_rgb_pic = ctx->last_pic.data[0] +
                              ctx->last_pic.linesize[0] * (avctx->height - 1);
        } else {
            av_log(avctx, AV_LOG_ERROR, "Missing keyframe\n");
            return AVERROR_INVALIDDATA;
        }
    } else {
        if (ctx->last_pic.data[0])
            avctx->release_buffer(avctx, &ctx->last_pic);

        ctx->pic.reference    = 3;
        ctx->pic.buffer_hints = FF_BUFFER_HINTS_VALID    |
                                FF_BUFFER_HINTS_READABLE |
                                FF_BUFFER_HINTS_PRESERVE |
                                FF_BUFFER_HINTS_REUSABLE;

        if ((ret = avctx->reget_buffer(avctx, &ctx->pic)) < 0) {
            av_log(avctx, AV_LOG_ERROR, "reget_buffer() failed\n");
            return ret;
        }

        c->last_rgb_pic = NULL;
    }
    c->rgb_pic    = ctx->pic.data[0] +
                    ctx->pic.linesize[0] * (avctx->height - 1);
    c->rgb_stride = -ctx->pic.linesize[0];
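    /* Note that rgb_pic points at the last row and rgb_stride is negative:
     * the RGB plane is filled bottom-up, while the palette plane keeps a
     * normal top-down layout with pal_stride. */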
    ctx->pic.key_frame = keyframe;
    ctx->pic.pict_type = keyframe ? AV_PICTURE_TYPE_I : AV_PICTURE_TYPE_P;
    if (is_555) {
        bytestream2_init(&gB, buf, buf_size);

        if (decode_555(&gB, (uint16_t *)c->rgb_pic, c->rgb_stride >> 1,
                       keyframe, avctx->width, avctx->height))
            return AVERROR_INVALIDDATA;

        buf_size -= bytestream2_tell(&gB);
    } else {
        if (keyframe) {
            c->corrupted = 0;
            ff_mss12_slicecontext_reset(&ctx->sc[0]);
            if (c->slice_split)
                ff_mss12_slicecontext_reset(&ctx->sc[1]);
        }
        if (is_rle) {
            init_get_bits(&gb, buf, buf_size * 8);
            if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
                                 c->rgb_pic, c->rgb_stride, c->pal, keyframe,
                                 ctx->split_position, 0,
                                 avctx->width, avctx->height))
                return ret;
            align_get_bits(&gb);
            if (c->slice_split)
                if (ret = decode_rle(&gb, c->pal_pic, c->pal_stride,
                                     c->rgb_pic, c->rgb_stride, c->pal, keyframe,
                                     ctx->split_position, 1,
                                     avctx->width, avctx->height))
                    return ret;
            align_get_bits(&gb);
            buf      += get_bits_count(&gb) >> 3;
            buf_size -= get_bits_count(&gb) >> 3;
        } else if (!implicit_rect || wmv9_mask != -1) {
            if (buf_size < 3)
                return AVERROR_INVALIDDATA;
            bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
            arith2_init(&acoder, &gB);
            c->keyframe = keyframe;
            if (c->corrupted = ff_mss12_decode_rect(&ctx->sc[0], &acoder, 0, 0,
                                                    avctx->width,
                                                    ctx->split_position))
                return AVERROR_INVALIDDATA;

            buf      += arith2_get_consumed_bytes(&acoder);
            buf_size -= arith2_get_consumed_bytes(&acoder);
            if (c->slice_split) {
                if (buf_size < 3)
                    return AVERROR_INVALIDDATA;
                bytestream2_init(&gB, buf, buf_size + ARITH2_PADDING);
                arith2_init(&acoder, &gB);
                if (c->corrupted = ff_mss12_decode_rect(&ctx->sc[1], &acoder, 0,
                                                        ctx->split_position,
                                                        avctx->width,
                                                        avctx->height - ctx->split_position))
                    return AVERROR_INVALIDDATA;

                buf      += arith2_get_consumed_bytes(&acoder);
                buf_size -= arith2_get_consumed_bytes(&acoder);
            }
        } else
            memset(c->pal_pic, 0, c->pal_stride * avctx->height);
    }
    if (has_wmv9) {
        for (i = 0; i < used_rects; i++) {
            int x = wmv9rects[i].x;
            int y = wmv9rects[i].y;
            int w = wmv9rects[i].w;
            int h = wmv9rects[i].h;
            if (wmv9rects[i].coded) {
                int WMV9codedFrameSize;
                if (buf_size < 4 || !(WMV9codedFrameSize = AV_RL24(buf)))
                    return AVERROR_INVALIDDATA;
                if (ret = decode_wmv9(avctx, buf + 3, buf_size - 3,
                                      x, y, w, h, wmv9_mask))
                    return ret;
                buf      += WMV9codedFrameSize + 3;
                buf_size -= WMV9codedFrameSize + 3;
            } else {
                uint8_t *dst = c->rgb_pic + y * c->rgb_stride + x * 3;
                if (wmv9_mask != -1) {
                    ctx->dsp.mss2_gray_fill_masked(dst, c->rgb_stride,
                                                   wmv9_mask,
                                                   c->pal_pic + y * c->pal_stride + x,
                                                   c->pal_stride,
                                                   w, h);
                } else {
                    do {
                        memset(dst, 0x80, w * 3);
                        dst += c->rgb_stride;
                    } while (--h);
                }
            }
        }
    }

    if (buf_size)
        av_log(avctx, AV_LOG_WARNING, "buffer not fully consumed\n");
    *got_frame       = 1;
    *(AVFrame *)data = ctx->pic;

    return avpkt->size;
}
static av_cold int wmv9_init(AVCodecContext *avctx)
{
    VC1Context *v = avctx->priv_data;

    v->s.avctx    = avctx;
    avctx->flags |= CODEC_FLAG_EMU_EDGE;
    v->s.flags   |= CODEC_FLAG_EMU_EDGE;

    if (avctx->idct_algo == FF_IDCT_AUTO)
        avctx->idct_algo = FF_IDCT_WMV2;

    if (ff_vc1_init_common(v) < 0)
        return -1;

    ff_vc1dsp_init(&v->vc1dsp);

    v->profile = PROFILE_MAIN;

    v->zz_8x4 = ff_wmv2_scantableA;
    v->zz_4x8 = ff_wmv2_scantableB;

    v->frmrtq_postproc = 7;
    v->bitrtq_postproc = 31;

    v->s.resync_marker = 0;

    v->s.max_b_frames = avctx->max_b_frames = 0;
    v->quantizer_mode = 0;

    ff_vc1_init_transposed_scantables(v);

    if (ff_msmpeg4_decode_init(avctx) < 0 ||
        ff_vc1_decode_init_alloc_tables(v) < 0)
        return -1;

    /* error concealment */
    v->s.me.qpel_put = v->s.dsp.put_qpel_pixels_tab;
    v->s.me.qpel_avg = v->s.dsp.avg_qpel_pixels_tab;

    return 0;
}
static av_cold int mss2_decode_end(AVCodecContext *avctx)
{
    MSS2Context *const ctx = avctx->priv_data;

    if (ctx->pic.data[0])
        avctx->release_buffer(avctx, &ctx->pic);
    if (ctx->last_pic.data[0])
        avctx->release_buffer(avctx, &ctx->last_pic);

    ff_mss12_decode_end(&ctx->c);
    av_freep(&ctx->c.pal_pic);
    av_freep(&ctx->c.last_pal_pic);
    ff_vc1_decode_end(avctx);

    return 0;
}
static av_cold int mss2_decode_init(AVCodecContext *avctx)
{
    MSS2Context * const ctx = avctx->priv_data;
    MSS12Context *c = &ctx->c;
    int ret;

    c->avctx = avctx;
    avctx->coded_frame = &ctx->pic;
    if (ret = ff_mss12_decode_init(c, 1, &ctx->sc[0], &ctx->sc[1]))
        return ret;
    c->pal_stride   = c->mask_stride;
    c->pal_pic      = av_mallocz(c->pal_stride * avctx->height);
    c->last_pal_pic = av_mallocz(c->pal_stride * avctx->height);
    if (!c->pal_pic || !c->last_pal_pic) {
        mss2_decode_end(avctx);
        return AVERROR(ENOMEM);
    }
    if (ret = wmv9_init(avctx)) {
        mss2_decode_end(avctx);
        return ret;
    }
    ff_mss2dsp_init(&ctx->dsp);

    avctx->pix_fmt = c->free_colours == 127 ? AV_PIX_FMT_RGB555
                                            : AV_PIX_FMT_RGB24;

    return 0;
}
AVCodec ff_mss2_decoder = {
    .name           = "mss2",
    .type           = AVMEDIA_TYPE_VIDEO,
    .id             = AV_CODEC_ID_MSS2,
    .priv_data_size = sizeof(MSS2Context),
    .init           = mss2_decode_init,
    .close          = mss2_decode_end,
    .decode         = mss2_decode_frame,
    .capabilities   = CODEC_CAP_DR1,
    .long_name      = NULL_IF_CONFIG_SMALL("MS Windows Media Video V9 Screen"),
};