pgssubdec: fix API compatibility layer
libavcodec/pgssubdec.c
/*
 * PGS subtitle decoder
 * Copyright (c) 2009 Stephen Backway
 *
 * This file is part of Libav.
 *
 * Libav is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * Libav is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with Libav; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

/**
 * @file
 * PGS subtitle decoder
 */

#include "avcodec.h"
#include "bytestream.h"
#include "internal.h"
#include "mathops.h"

#include "libavutil/colorspace.h"
#include "libavutil/imgutils.h"

#define RGBA(r,g,b,a) (((a) << 24) | ((r) << 16) | ((g) << 8) | (b))
#define MAX_EPOCH_PALETTES 8   // Max 8 allowed per PGS epoch
#define MAX_EPOCH_OBJECTS  64  // Max 64 allowed per PGS epoch
#define MAX_OBJECT_REFS    2   // Max objects per display set

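/*
 * Segment type codes. Every segment in a PGS packet is preceded by a
 * 3-byte header: a 1-byte segment type followed by a 2-byte big-endian
 * segment length (see decode() below).
 */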
enum SegmentType {
    PALETTE_SEGMENT      = 0x14,
    OBJECT_SEGMENT       = 0x15,
    PRESENTATION_SEGMENT = 0x16,
    WINDOW_SEGMENT       = 0x17,
    DISPLAY_SEGMENT      = 0x80,
};

typedef struct PGSSubObjectRef {
    int     id;
    int     window_id;
    uint8_t composition_flag;
    int     x;
    int     y;
    int     crop_x;
    int     crop_y;
    int     crop_w;
    int     crop_h;
} PGSSubObjectRef;

typedef struct PGSSubPresentation {
    int id_number;
    int palette_id;
    int object_count;
    PGSSubObjectRef objects[MAX_OBJECT_REFS];
    int64_t pts;
} PGSSubPresentation;

typedef struct PGSSubObject {
    int          id;
    int          w;
    int          h;
    uint8_t      *rle;
    unsigned int rle_buffer_size, rle_data_len;
    unsigned int rle_remaining_len;
} PGSSubObject;

typedef struct PGSSubObjects {
    int          count;
    PGSSubObject object[MAX_EPOCH_OBJECTS];
} PGSSubObjects;

typedef struct PGSSubPalette {
    int      id;
    uint32_t clut[256];
} PGSSubPalette;

typedef struct PGSSubPalettes {
    int           count;
    PGSSubPalette palette[MAX_EPOCH_PALETTES];
} PGSSubPalettes;

typedef struct PGSSubContext {
    PGSSubPresentation presentation;
    PGSSubPalettes     palettes;
    PGSSubObjects      objects;
} PGSSubContext;

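/*
 * Free the cached RLE buffers and reset the object and palette counts.
 * Called when a presentation segment signals an epoch boundary and when
 * the decoder is closed.
 */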
static void flush_cache(AVCodecContext *avctx)
{
    PGSSubContext *ctx = avctx->priv_data;
    int i;

    for (i = 0; i < ctx->objects.count; i++) {
        av_freep(&ctx->objects.object[i].rle);
        ctx->objects.object[i].rle_buffer_size   = 0;
        ctx->objects.object[i].rle_remaining_len = 0;
    }
    ctx->objects.count  = 0;
    ctx->palettes.count = 0;
}

static PGSSubObject * find_object(int id, PGSSubObjects *objects)
{
    int i;

    for (i = 0; i < objects->count; i++) {
        if (objects->object[i].id == id)
            return &objects->object[i];
    }
    return NULL;
}

static PGSSubPalette * find_palette(int id, PGSSubPalettes *palettes)
{
    int i;

    for (i = 0; i < palettes->count; i++) {
        if (palettes->palette[i].id == id)
            return &palettes->palette[i];
    }
    return NULL;
}

static av_cold int init_decoder(AVCodecContext *avctx)
{
    avctx->pix_fmt = AV_PIX_FMT_PAL8;

    return 0;
}

static av_cold int close_decoder(AVCodecContext *avctx)
{
    flush_cache(avctx);

    return 0;
}

/**
 * Decode the RLE data.
 *
 * The subtitle is stored as a Run Length Encoded image.
 *
 * @param avctx contains the current codec context
 * @param rect pointer to the subtitle rect that receives the decoded bitmap
 * @param buf pointer to the RLE data to process
 * @param buf_size size of the RLE data to process
 */
static int decode_rle(AVCodecContext *avctx, AVSubtitleRect *rect,
                      const uint8_t *buf, unsigned int buf_size)
{
    const uint8_t *rle_bitmap_end;
    int pixel_count, line_count;

    rle_bitmap_end = buf + buf_size;

    rect->data[0] = av_malloc(rect->w * rect->h);

    if (!rect->data[0])
        return AVERROR(ENOMEM);

    pixel_count = 0;
    line_count  = 0;

    while (buf < rle_bitmap_end && line_count < rect->h) {
        uint8_t flags, color;
        int run;

        color = bytestream_get_byte(&buf);
        run   = 1;

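        /*
         * A plain byte is a run of one pixel of that color. A 0x00 byte
         * escapes to a flags byte: bits 0-5 give the run length, bit 6
         * extends the run with the next byte (14-bit run), and bit 7
         * means an explicit color byte follows (otherwise color 0 is
         * used). A zero-length run marks the end of the current line.
         */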
        if (color == 0x00) {
            flags = bytestream_get_byte(&buf);
            run   = flags & 0x3f;
            if (flags & 0x40)
                run = (run << 8) + bytestream_get_byte(&buf);
            color = flags & 0x80 ? bytestream_get_byte(&buf) : 0;
        }

        if (run > 0 && pixel_count + run <= rect->w * rect->h) {
            memset(rect->data[0] + pixel_count, color, run);
            pixel_count += run;
        } else if (!run) {
            /*
             * New line. Check that the expected number of pixels was
             * decoded for this line; if not, log an error.
             */
            if (pixel_count % rect->w > 0) {
                av_log(avctx, AV_LOG_ERROR, "Decoded %d pixels, when line should be %d pixels\n",
                       pixel_count % rect->w, rect->w);
                if (avctx->err_recognition & AV_EF_EXPLODE) {
                    return AVERROR_INVALIDDATA;
                }
            }
            line_count++;
        }
    }

    if (pixel_count < rect->w * rect->h) {
        av_log(avctx, AV_LOG_ERROR, "Insufficient RLE data for subtitle\n");
        return AVERROR_INVALIDDATA;
    }

    ff_dlog(avctx, "Pixel Count = %d, Area = %d\n", pixel_count, rect->w * rect->h);

    return 0;
}

/**
 * Parse the object segment packet.
 *
 * The object segment contains details on the object id, its
 * width and height, and the Run Length Encoded (RLE) bitmap data.
 *
 * @param avctx contains the current codec context
 * @param buf pointer to the packet to process
 * @param buf_size size of packet to process
 */
static int parse_object_segment(AVCodecContext *avctx,
                                const uint8_t *buf, int buf_size)
{
    PGSSubContext *ctx = avctx->priv_data;
    PGSSubObject *object;

    uint8_t sequence_desc;
    unsigned int rle_bitmap_len, width, height;
    int id;

    if (buf_size <= 4)
        return AVERROR_INVALIDDATA;
    buf_size -= 4;

    id     = bytestream_get_be16(&buf);
    object = find_object(id, &ctx->objects);
    if (!object) {
        if (ctx->objects.count >= MAX_EPOCH_OBJECTS) {
            av_log(avctx, AV_LOG_ERROR, "Too many objects in epoch\n");
            return AVERROR_INVALIDDATA;
        }
        object = &ctx->objects.object[ctx->objects.count++];
        object->id = id;
    }

    /* skip object version number */
    buf += 1;

    /* Read the Sequence Description to determine if start of RLE data or appended to previous RLE */
    sequence_desc = bytestream_get_byte(&buf);

    if (!(sequence_desc & 0x80)) {
        /* Additional RLE data */
        if (buf_size > object->rle_remaining_len)
            return AVERROR_INVALIDDATA;

        memcpy(object->rle + object->rle_data_len, buf, buf_size);
        object->rle_data_len += buf_size;
        object->rle_remaining_len -= buf_size;

        return 0;
    }

    if (buf_size <= 7)
        return AVERROR_INVALIDDATA;
    buf_size -= 7;

    /* Decode rle bitmap length, stored size includes width/height data */
    rle_bitmap_len = bytestream_get_be24(&buf) - 2*2;

    if (buf_size > rle_bitmap_len) {
        av_log(avctx, AV_LOG_ERROR,
               "Buffer dimension %d larger than the expected RLE data %d\n",
               buf_size, rle_bitmap_len);
        return AVERROR_INVALIDDATA;
    }

    /* Get bitmap dimensions from data */
    width  = bytestream_get_be16(&buf);
    height = bytestream_get_be16(&buf);

    /* Make sure the bitmap is not too large */
    if (avctx->width < width || avctx->height < height) {
        av_log(avctx, AV_LOG_ERROR, "Bitmap dimensions larger than video.\n");
        return AVERROR_INVALIDDATA;
    }

    object->w = width;
    object->h = height;

    av_fast_malloc(&object->rle, &object->rle_buffer_size, rle_bitmap_len);

    if (!object->rle)
        return AVERROR(ENOMEM);

    memcpy(object->rle, buf, buf_size);
    object->rle_data_len      = buf_size;
    object->rle_remaining_len = rle_bitmap_len - buf_size;

    return 0;
}

/**
 * Parse the palette segment packet.
 *
 * The palette segment contains details of the palette;
 * a maximum of 256 colors can be defined.
 *
 * @param avctx contains the current codec context
 * @param buf pointer to the packet to process
 * @param buf_size size of packet to process
 */
static int parse_palette_segment(AVCodecContext *avctx,
                                 const uint8_t *buf, int buf_size)
{
    PGSSubContext *ctx = avctx->priv_data;
    PGSSubPalette *palette;

    const uint8_t *buf_end = buf + buf_size;
    const uint8_t *cm      = ff_crop_tab + MAX_NEG_CROP;
    int color_id;
    int y, cb, cr, alpha;
    int r, g, b, r_add, g_add, b_add;
    int id;

    id      = bytestream_get_byte(&buf);
    palette = find_palette(id, &ctx->palettes);
    if (!palette) {
        if (ctx->palettes.count >= MAX_EPOCH_PALETTES) {
            av_log(avctx, AV_LOG_ERROR, "Too many palettes in epoch\n");
            return AVERROR_INVALIDDATA;
        }
        palette = &ctx->palettes.palette[ctx->palettes.count++];
        palette->id = id;
    }

    /* Skip palette version */
    buf += 1;

    while (buf < buf_end) {
        color_id = bytestream_get_byte(&buf);
        y        = bytestream_get_byte(&buf);
        cr       = bytestream_get_byte(&buf);
        cb       = bytestream_get_byte(&buf);
        alpha    = bytestream_get_byte(&buf);

        YUV_TO_RGB1(cb, cr);
        YUV_TO_RGB2(r, g, b, y);

        ff_dlog(avctx, "Color %d := (%d,%d,%d,%d)\n", color_id, r, g, b, alpha);

        /* Store color in palette */
        palette->clut[color_id] = RGBA(r,g,b,alpha);
    }
    return 0;
}

/**
 * Parse the presentation segment packet.
 *
 * The presentation segment contains details on the video
 * width, video height, x & y subtitle position.
 *
 * @param avctx contains the current codec context
 * @param buf pointer to the packet to process
 * @param buf_size size of packet to process
 * @todo Implement cropping
 */
static int parse_presentation_segment(AVCodecContext *avctx,
                                      const uint8_t *buf, int buf_size,
                                      int64_t pts)
{
    PGSSubContext *ctx = avctx->priv_data;

    int i, state, ret;

    // Video descriptor
    int w = bytestream_get_be16(&buf);
    int h = bytestream_get_be16(&buf);

    ctx->presentation.pts = pts;

    ff_dlog(avctx, "Video Dimensions %dx%d\n",
            w, h);
    ret = ff_set_dimensions(avctx, w, h);
    if (ret < 0)
        return ret;

    /* Skip 1 byte of unknown data (frame rate) */
    buf++;

    // Composition descriptor
    ctx->presentation.id_number = bytestream_get_be16(&buf);
    /*
     * state is a 2 bit field that defines pgs epoch boundaries
     * 00 - Normal, previously defined objects and palettes are still valid
     * 01 - Acquisition point, previous objects and palettes can be released
     * 10 - Epoch start, previous objects and palettes can be released
     * 11 - Epoch continue, previous objects and palettes can be released
     *
     * reserved 6 bits discarded
     */
    state = bytestream_get_byte(&buf) >> 6;
    if (state != 0) {
        flush_cache(avctx);
    }

    /*
     * skip palette_update_flag (0x80),
     */
    buf += 1;
    ctx->presentation.palette_id   = bytestream_get_byte(&buf);
    ctx->presentation.object_count = bytestream_get_byte(&buf);
    if (ctx->presentation.object_count > MAX_OBJECT_REFS) {
        av_log(avctx, AV_LOG_ERROR,
               "Invalid number of presentation objects %d\n",
               ctx->presentation.object_count);
        ctx->presentation.object_count = 2;
        if (avctx->err_recognition & AV_EF_EXPLODE) {
            return AVERROR_INVALIDDATA;
        }
    }

    for (i = 0; i < ctx->presentation.object_count; i++)
    {
        ctx->presentation.objects[i].id        = bytestream_get_be16(&buf);
        ctx->presentation.objects[i].window_id = bytestream_get_byte(&buf);
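        /*
         * composition_flag: bit 0x80 signals a cropped object (crop
         * coordinates follow below), bit 0x40 marks a forced subtitle
         * (mapped to AV_SUBTITLE_FLAG_FORCED in display_end_segment()).
         */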
        ctx->presentation.objects[i].composition_flag = bytestream_get_byte(&buf);

        ctx->presentation.objects[i].x = bytestream_get_be16(&buf);
        ctx->presentation.objects[i].y = bytestream_get_be16(&buf);

        // If cropping
        if (ctx->presentation.objects[i].composition_flag & 0x80) {
            ctx->presentation.objects[i].crop_x = bytestream_get_be16(&buf);
            ctx->presentation.objects[i].crop_y = bytestream_get_be16(&buf);
            ctx->presentation.objects[i].crop_w = bytestream_get_be16(&buf);
            ctx->presentation.objects[i].crop_h = bytestream_get_be16(&buf);
        }

        ff_dlog(avctx, "Subtitle Placement x=%d, y=%d\n",
                ctx->presentation.objects[i].x, ctx->presentation.objects[i].y);

        if (ctx->presentation.objects[i].x > avctx->width ||
            ctx->presentation.objects[i].y > avctx->height) {
            av_log(avctx, AV_LOG_ERROR, "Subtitle out of video bounds. x = %d, y = %d, video width = %d, video height = %d.\n",
                   ctx->presentation.objects[i].x,
                   ctx->presentation.objects[i].y,
                   avctx->width, avctx->height);
            ctx->presentation.objects[i].x = 0;
            ctx->presentation.objects[i].y = 0;
            if (avctx->err_recognition & AV_EF_EXPLODE) {
                return AVERROR_INVALIDDATA;
            }
        }
    }

    return 0;
}

/**
 * Parse the display segment packet.
 *
 * The display segment controls the updating of the display.
 *
 * @param avctx contains the current codec context
 * @param data pointer to the data pertaining to the subtitle to display
 * @param buf pointer to the packet to process
 * @param buf_size size of packet to process
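 * @return 1 if an output subtitle was assembled, a negative AVERROR code on failure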
 */
static int display_end_segment(AVCodecContext *avctx, void *data,
                               const uint8_t *buf, int buf_size)
{
    AVSubtitle    *sub = data;
    PGSSubContext *ctx = avctx->priv_data;
    PGSSubPalette *palette;
    int i, ret;

    memset(sub, 0, sizeof(*sub));
    sub->pts = ctx->presentation.pts;
    sub->start_display_time = 0;
    // There is no explicit end time for PGS subtitles. The end time
    // is defined by the start of the next sub which may contain no
    // objects (i.e. clears the previous sub)
    sub->end_display_time   = UINT32_MAX;
    sub->format             = 0;

    // Blank if last object_count was 0.
    if (!ctx->presentation.object_count)
        return 1;
    sub->rects = av_mallocz(sizeof(*sub->rects) * ctx->presentation.object_count);
    if (!sub->rects) {
        return AVERROR(ENOMEM);
    }
    palette = find_palette(ctx->presentation.palette_id, &ctx->palettes);
    if (!palette) {
        // Missing palette. Should only happen with damaged streams.
        av_log(avctx, AV_LOG_ERROR, "Invalid palette id %d\n",
               ctx->presentation.palette_id);
        avsubtitle_free(sub);
        return AVERROR_INVALIDDATA;
    }
    for (i = 0; i < ctx->presentation.object_count; i++) {
        PGSSubObject *object;
        AVSubtitleRect *rect;
        int j;

        sub->rects[i] = av_mallocz(sizeof(*sub->rects[0]));
        if (!sub->rects[i]) {
            avsubtitle_free(sub);
            return AVERROR(ENOMEM);
        }
        sub->num_rects++;
        sub->rects[i]->type = SUBTITLE_BITMAP;

        /* Process bitmap */
        object = find_object(ctx->presentation.objects[i].id, &ctx->objects);
        if (!object) {
            // Missing object. Should only happen with damaged streams.
            av_log(avctx, AV_LOG_ERROR, "Invalid object id %d\n",
                   ctx->presentation.objects[i].id);
            if (avctx->err_recognition & AV_EF_EXPLODE) {
                avsubtitle_free(sub);
                return AVERROR_INVALIDDATA;
            }
            // Leaves rect empty with 0 width and height.
            continue;
        }
        if (ctx->presentation.objects[i].composition_flag & 0x40)
            sub->rects[i]->flags |= AV_SUBTITLE_FLAG_FORCED;

        sub->rects[i]->x = ctx->presentation.objects[i].x;
        sub->rects[i]->y = ctx->presentation.objects[i].y;
        sub->rects[i]->w = object->w;
        sub->rects[i]->h = object->h;

        sub->rects[i]->linesize[0] = object->w;

        if (object->rle) {
            if (object->rle_remaining_len) {
                av_log(avctx, AV_LOG_ERROR, "RLE data length %u is %u bytes shorter than expected\n",
                       object->rle_data_len, object->rle_remaining_len);
                if (avctx->err_recognition & AV_EF_EXPLODE) {
                    avsubtitle_free(sub);
                    return AVERROR_INVALIDDATA;
                }
            }
            ret = decode_rle(avctx, sub->rects[i], object->rle, object->rle_data_len);
            if (ret < 0) {
                if ((avctx->err_recognition & AV_EF_EXPLODE) ||
                    ret == AVERROR(ENOMEM)) {
                    avsubtitle_free(sub);
                    return ret;
                }
                sub->rects[i]->w = 0;
                sub->rects[i]->h = 0;
                continue;
            }
        }
        /* Allocate memory for colors */
        sub->rects[i]->nb_colors = 256;
        sub->rects[i]->data[1]   = av_mallocz(AVPALETTE_SIZE);
        if (!sub->rects[i]->data[1]) {
            avsubtitle_free(sub);
            return AVERROR(ENOMEM);
        }

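        /*
         * API compatibility layer: mirror the new data[]/linesize[] fields
         * into the deprecated AVPicture "pict" member so that callers of
         * the old API still see the decoded bitmap.
         */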
#if FF_API_AVPICTURE
FF_DISABLE_DEPRECATION_WARNINGS
        rect = sub->rects[i];
        for (j = 0; j < 4; j++) {
            rect->pict.data[j]     = rect->data[j];
            rect->pict.linesize[j] = rect->linesize[j];
        }
FF_ENABLE_DEPRECATION_WARNINGS
#endif

        memcpy(sub->rects[i]->data[1], palette->clut, sub->rects[i]->nb_colors * sizeof(uint32_t));

    }
    return 1;
}

static int decode(AVCodecContext *avctx, void *data, int *data_size,
                  AVPacket *avpkt)
{
    const uint8_t *buf = avpkt->data;
    int buf_size       = avpkt->size;

    const uint8_t *buf_end;
    uint8_t       segment_type;
    int           segment_length;
    int i, ret;

    ff_dlog(avctx, "PGS sub packet:\n");

    for (i = 0; i < buf_size; i++) {
        ff_dlog(avctx, "%02x ", buf[i]);
        if (i % 16 == 15)
            ff_dlog(avctx, "\n");
    }

    if (i & 15)
        ff_dlog(avctx, "\n");

    *data_size = 0;

    /* Ensure that we have received at least a segment code and segment length */
    if (buf_size < 3)
        return -1;

    buf_end = buf + buf_size;

    /* Step through buffer to identify segments */
    while (buf < buf_end) {
        segment_type   = bytestream_get_byte(&buf);
        segment_length = bytestream_get_be16(&buf);

        ff_dlog(avctx, "Segment Length %d, Segment Type %x\n", segment_length, segment_type);

        if (segment_type != DISPLAY_SEGMENT && segment_length > buf_end - buf)
            break;

        ret = 0;
        switch (segment_type) {
        case PALETTE_SEGMENT:
            ret = parse_palette_segment(avctx, buf, segment_length);
            break;
        case OBJECT_SEGMENT:
            ret = parse_object_segment(avctx, buf, segment_length);
            break;
        case PRESENTATION_SEGMENT:
            ret = parse_presentation_segment(avctx, buf, segment_length, avpkt->pts);
            break;
        case WINDOW_SEGMENT:
            /*
             * Window Segment Structure (No new information provided):
             *     2 bytes: Unknown,
             *     2 bytes: X position of subtitle,
             *     2 bytes: Y position of subtitle,
             *     2 bytes: Width of subtitle,
             *     2 bytes: Height of subtitle.
             */
            break;
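        /*
         * A display segment ends the display set; display_end_segment()
         * returns 1 once a complete AVSubtitle has been assembled, which
         * is reported to the caller through *data_size.
         */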
        case DISPLAY_SEGMENT:
            ret = display_end_segment(avctx, data, buf, segment_length);
            if (ret >= 0)
                *data_size = ret;
            break;
        default:
            av_log(avctx, AV_LOG_ERROR, "Unknown subtitle segment type 0x%x, length %d\n",
                   segment_type, segment_length);
            ret = AVERROR_INVALIDDATA;
            break;
        }
        if (ret < 0 && (avctx->err_recognition & AV_EF_EXPLODE))
            return ret;

        buf += segment_length;
    }

    return buf_size;
}

AVCodec ff_pgssub_decoder = {
    .name           = "pgssub",
    .long_name      = NULL_IF_CONFIG_SMALL("HDMV Presentation Graphic Stream subtitles"),
    .type           = AVMEDIA_TYPE_SUBTITLE,
    .id             = AV_CODEC_ID_HDMV_PGS_SUBTITLE,
    .priv_data_size = sizeof(PGSSubContext),
    .init           = init_decoder,
    .close          = close_decoder,
    .decode         = decode,
};