Make ffplay pass the packet.pos information to the filterchain, get it
/*
 * FFplay : Simple Media Player based on the FFmpeg libraries
 * Copyright (c) 2003 Fabrice Bellard
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"
#include <inttypes.h>
#include <math.h>
#include <limits.h>
#include "libavutil/avstring.h"
#include "libavutil/pixdesc.h"
#include "libavformat/avformat.h"
#include "libavdevice/avdevice.h"
#include "libswscale/swscale.h"
#include "libavcodec/audioconvert.h"
#include "libavcodec/colorspace.h"
#include "libavcodec/opt.h"
#include "libavcodec/avfft.h"

#if CONFIG_AVFILTER
# include "libavfilter/avfilter.h"
# include "libavfilter/avfiltergraph.h"
# include "libavfilter/graphparser.h"
#endif

#include "cmdutils.h"

#include <SDL.h>
#include <SDL_thread.h>

#ifdef __MINGW32__
#undef main /* We don't want SDL to override our main() */
#endif

#include <unistd.h>
#include <assert.h>

const char program_name[] = "FFplay";
const int program_birth_year = 2003;

//#define DEBUG_SYNC

#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
#define MIN_FRAMES 5

/* SDL audio buffer size, in samples. Should be small to have precise
   A/V sync as SDL does not have hardware buffer fullness info. */
#define SDL_AUDIO_BUFFER_SIZE 1024

/* no AV sync correction is done if below the AV sync threshold */
#define AV_SYNC_THRESHOLD 0.01
/* no AV correction is done if too big error */
#define AV_NOSYNC_THRESHOLD 10.0

#define FRAME_SKIP_FACTOR 0.05

/* maximum audio speed change to get correct sync */
#define SAMPLE_CORRECTION_PERCENT_MAX 10

/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
#define AUDIO_DIFF_AVG_NB 20
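
/* Sketch of how the average above is used (see synchronize_audio() later in
 * this file): the A-V difference estimate is an exponentially weighted
 * moving average,
 *     audio_diff_cum = diff + audio_diff_avg_coef * audio_diff_cum;
 *     avg_diff       = audio_diff_cum * (1.0 - audio_diff_avg_coef);
 * The initialization of audio_diff_avg_coef is outside this excerpt;
 * presumably it is chosen so that roughly the last AUDIO_DIFF_AVG_NB
 * differences dominate the estimate. */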

/* NOTE: the size must be big enough to compensate the hardware audio buffer size */
#define SAMPLE_ARRAY_SIZE (2*65536)

#if !CONFIG_AVFILTER
static int sws_flags = SWS_BICUBIC;
#endif

typedef struct PacketQueue {
    AVPacketList *first_pkt, *last_pkt;
    int nb_packets;
    int size;
    int abort_request;
    SDL_mutex *mutex;
    SDL_cond *cond;
} PacketQueue;

#define VIDEO_PICTURE_QUEUE_SIZE 2
#define SUBPICTURE_QUEUE_SIZE 4

typedef struct VideoPicture {
    double pts;          ///<presentation time stamp for this picture
    double target_clock; ///<av_gettime() time at which this should be displayed ideally
    int64_t pos;         ///<byte position in file
    SDL_Overlay *bmp;
    int width, height;   /* source height & width */
    int allocated;
    enum PixelFormat pix_fmt;

#if CONFIG_AVFILTER
    AVFilterPicRef *picref;
#endif
} VideoPicture;

typedef struct SubPicture {
    double pts; /* presentation time stamp for this picture */
    AVSubtitle sub;
} SubPicture;

enum {
    AV_SYNC_AUDIO_MASTER, /* default choice */
    AV_SYNC_VIDEO_MASTER,
    AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
};

124typedef struct VideoState {
125 SDL_Thread *parse_tid;
126 SDL_Thread *video_tid;
d38c9e7a 127 SDL_Thread *refresh_tid;
638c9d91 128 AVInputFormat *iformat;
01310af2
FB
129 int no_background;
130 int abort_request;
131 int paused;
416e3508 132 int last_paused;
72ea344b 133 int seek_req;
3ba1438d 134 int seek_flags;
72ea344b 135 int64_t seek_pos;
4ed29207 136 int64_t seek_rel;
f5668147 137 int read_pause_return;
01310af2
FB
138 AVFormatContext *ic;
139 int dtg_active_format;
140
141 int audio_stream;
115329f1 142
01310af2 143 int av_sync_type;
638c9d91
FB
144 double external_clock; /* external clock base */
145 int64_t external_clock_time;
115329f1 146
638c9d91
FB
147 double audio_clock;
148 double audio_diff_cum; /* used for AV difference average computation */
149 double audio_diff_avg_coef;
150 double audio_diff_threshold;
151 int audio_diff_avg_count;
01310af2
FB
152 AVStream *audio_st;
153 PacketQueue audioq;
154 int audio_hw_buf_size;
155 /* samples output by the codec. we reserve more space for avsync
156 compensation */
c6727809
MR
157 DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
158 DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
5a4476e2 159 uint8_t *audio_buf;
7fea94ce 160 unsigned int audio_buf_size; /* in bytes */
01310af2 161 int audio_buf_index; /* in bytes */
bea18375 162 AVPacket audio_pkt_temp;
01310af2 163 AVPacket audio_pkt;
5a4476e2
PR
164 enum SampleFormat audio_src_fmt;
165 AVAudioConvert *reformat_ctx;
115329f1 166
01310af2
FB
167 int show_audio; /* if true, display audio samples */
168 int16_t sample_array[SAMPLE_ARRAY_SIZE];
169 int sample_array_index;
5e0257e3 170 int last_i_start;
166621ab 171 RDFTContext *rdft;
12eeda34
MN
172 int rdft_bits;
173 int xpos;
115329f1 174
72ce053b
IC
175 SDL_Thread *subtitle_tid;
176 int subtitle_stream;
177 int subtitle_stream_changed;
178 AVStream *subtitle_st;
179 PacketQueue subtitleq;
180 SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
181 int subpq_size, subpq_rindex, subpq_windex;
182 SDL_mutex *subpq_mutex;
183 SDL_cond *subpq_cond;
115329f1 184
638c9d91
FB
185 double frame_timer;
186 double frame_last_pts;
187 double frame_last_delay;
115329f1 188 double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
01310af2
FB
189 int video_stream;
190 AVStream *video_st;
191 PacketQueue videoq;
267e9dfa 192 double video_current_pts; ///<current displayed pts (different from video_clock if frame fifos are used)
68aefbe8 193 double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
1a620dd7 194 int64_t video_current_pos; ///<current displayed file pos
01310af2
FB
195 VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
196 int pictq_size, pictq_rindex, pictq_windex;
197 SDL_mutex *pictq_mutex;
198 SDL_cond *pictq_cond;
917d2bb3 199#if !CONFIG_AVFILTER
3ac56e28 200 struct SwsContext *img_convert_ctx;
917d2bb3 201#endif
115329f1 202
01310af2
FB
203 // QETimer *video_timer;
204 char filename[1024];
205 int width, height, xleft, ytop;
41db429d
MN
206
207 int64_t faulty_pts;
208 int64_t faulty_dts;
209 int64_t last_dts_for_fault_detection;
210 int64_t last_pts_for_fault_detection;
211
917d2bb3
MN
212#if CONFIG_AVFILTER
213 AVFilterContext *out_video_filter; ///<the last filter in the video chain
214#endif
d38c9e7a
MN
215
216 float skip_frames;
217 float skip_frames_index;
218 int refresh;
01310af2
FB
219} VideoState;
220
358061f6 221static void show_help(void);
638c9d91 222static int audio_write_get_buf_size(VideoState *is);
01310af2
FB
223
224/* options specified by the user */
225static AVInputFormat *file_iformat;
226static const char *input_filename;
227static int fs_screen_width;
228static int fs_screen_height;
fccb19e3
MN
229static int screen_width = 0;
230static int screen_height = 0;
e4b89522
LW
231static int frame_width = 0;
232static int frame_height = 0;
233static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
01310af2
FB
234static int audio_disable;
235static int video_disable;
5b369983 236static int wanted_stream[CODEC_TYPE_NB]={
9f7490a0
MN
237 [CODEC_TYPE_AUDIO]=-1,
238 [CODEC_TYPE_VIDEO]=-1,
5b369983
MN
239 [CODEC_TYPE_SUBTITLE]=-1,
240};
70a4764d 241static int seek_by_bytes=-1;
01310af2 242static int display_disable;
1e1a0b18 243static int show_status = 1;
638c9d91 244static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b 245static int64_t start_time = AV_NOPTS_VALUE;
e26a8335 246static int debug = 0;
0c9bbaec 247static int debug_mv = 0;
bba04f1e 248static int step = 0;
c62c07d3 249static int thread_count = 1;
6387c3e6 250static int workaround_bugs = 1;
6fc5b059 251static int fast = 0;
30bc6613 252static int genpts = 0;
178fcca8
MN
253static int lowres = 0;
254static int idct = FF_IDCT_AUTO;
8c3eba7c
MN
255static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
256static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
257static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
047599a4 258static int error_recognition = FF_ER_CAREFUL;
1b51e051 259static int error_concealment = 3;
41db429d 260static int decoder_reorder_pts= -1;
2d1653b0 261static int autoexit;
d38c9e7a 262static int framedrop=1;
2b3da32f
MN
263
264static int rdftspeed=20;
917d2bb3
MN
265#if CONFIG_AVFILTER
266static char *vfilters = NULL;
267#endif
01310af2
FB
268
269/* current context */
270static int is_full_screen;
271static VideoState *cur_stream;
5e0257e3 272static int64_t audio_callback_time;
01310af2 273
2c676c33 274static AVPacket flush_pkt;
39c6a118 275
01310af2
FB
276#define FF_ALLOC_EVENT (SDL_USEREVENT)
277#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91 278#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
01310af2 279
2c676c33 280static SDL_Surface *screen;
01310af2 281
515bd00e
MN
282static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
283
01310af2
FB
284/* packet queue handling */
285static void packet_queue_init(PacketQueue *q)
286{
287 memset(q, 0, sizeof(PacketQueue));
288 q->mutex = SDL_CreateMutex();
289 q->cond = SDL_CreateCond();
515bd00e 290 packet_queue_put(q, &flush_pkt);
01310af2
FB
291}
292
72ea344b 293static void packet_queue_flush(PacketQueue *q)
01310af2
FB
294{
295 AVPacketList *pkt, *pkt1;
296
687fae2b 297 SDL_LockMutex(q->mutex);
01310af2
FB
298 for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
299 pkt1 = pkt->next;
300 av_free_packet(&pkt->pkt);
da6c4573 301 av_freep(&pkt);
01310af2 302 }
72ea344b
FB
303 q->last_pkt = NULL;
304 q->first_pkt = NULL;
305 q->nb_packets = 0;
306 q->size = 0;
687fae2b 307 SDL_UnlockMutex(q->mutex);
72ea344b
FB
308}
309
310static void packet_queue_end(PacketQueue *q)
311{
312 packet_queue_flush(q);
01310af2
FB
313 SDL_DestroyMutex(q->mutex);
314 SDL_DestroyCond(q->cond);
315}
316
317static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
318{
319 AVPacketList *pkt1;
320
72ea344b 321 /* duplicate the packet */
39c6a118 322 if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
72ea344b 323 return -1;
115329f1 324
01310af2
FB
325 pkt1 = av_malloc(sizeof(AVPacketList));
326 if (!pkt1)
327 return -1;
328 pkt1->pkt = *pkt;
329 pkt1->next = NULL;
330
72ea344b 331
01310af2
FB
332 SDL_LockMutex(q->mutex);
333
334 if (!q->last_pkt)
335
336 q->first_pkt = pkt1;
337 else
338 q->last_pkt->next = pkt1;
339 q->last_pkt = pkt1;
340 q->nb_packets++;
7b776589 341 q->size += pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
342 /* XXX: should duplicate packet data in DV case */
343 SDL_CondSignal(q->cond);
344
345 SDL_UnlockMutex(q->mutex);
346 return 0;
347}
348
349static void packet_queue_abort(PacketQueue *q)
350{
351 SDL_LockMutex(q->mutex);
352
353 q->abort_request = 1;
115329f1 354
01310af2
FB
355 SDL_CondSignal(q->cond);
356
357 SDL_UnlockMutex(q->mutex);
358}

/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
{
    AVPacketList *pkt1;
    int ret;

    SDL_LockMutex(q->mutex);

    for(;;) {
        if (q->abort_request) {
            ret = -1;
            break;
        }

        pkt1 = q->first_pkt;
        if (pkt1) {
            q->first_pkt = pkt1->next;
            if (!q->first_pkt)
                q->last_pkt = NULL;
            q->nb_packets--;
            q->size -= pkt1->pkt.size + sizeof(*pkt1);
            *pkt = pkt1->pkt;
            av_free(pkt1);
            ret = 1;
            break;
        } else if (!block) {
            ret = 0;
            break;
        } else {
            SDL_CondWait(q->cond, q->mutex);
        }
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}
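
/* Typical consumer pattern (as used by the decoder threads below), shown
 * here only as an illustrative sketch; the names are placeholders:
 *
 *     AVPacket pkt;
 *     if (packet_queue_get(&q, &pkt, 1) < 0)   // blocking get; < 0 => aborted
 *         return;                              // stop the thread
 *     if (pkt.data == flush_pkt.data) {
 *         avcodec_flush_buffers(codec_ctx);    // a seek happened, reset decoder
 *     } else {
 *         // ...decode pkt...
 *         av_free_packet(&pkt);
 *     }
 */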
395
115329f1 396static inline void fill_rectangle(SDL_Surface *screen,
01310af2
FB
397 int x, int y, int w, int h, int color)
398{
399 SDL_Rect rect;
400 rect.x = x;
401 rect.y = y;
402 rect.w = w;
403 rect.h = h;
404 SDL_FillRect(screen, &rect, color);
405}
406
407#if 0
408/* draw only the border of a rectangle */
409void fill_border(VideoState *s, int x, int y, int w, int h, int color)
410{
411 int w1, w2, h1, h2;
412
413 /* fill the background */
414 w1 = x;
415 if (w1 < 0)
416 w1 = 0;
417 w2 = s->width - (x + w);
418 if (w2 < 0)
419 w2 = 0;
420 h1 = y;
421 if (h1 < 0)
422 h1 = 0;
423 h2 = s->height - (y + h);
424 if (h2 < 0)
425 h2 = 0;
115329f1
DB
426 fill_rectangle(screen,
427 s->xleft, s->ytop,
428 w1, s->height,
01310af2 429 color);
115329f1
DB
430 fill_rectangle(screen,
431 s->xleft + s->width - w2, s->ytop,
432 w2, s->height,
01310af2 433 color);
115329f1
DB
434 fill_rectangle(screen,
435 s->xleft + w1, s->ytop,
436 s->width - w1 - w2, h1,
01310af2 437 color);
115329f1 438 fill_rectangle(screen,
01310af2
FB
439 s->xleft + w1, s->ytop + s->height - h2,
440 s->width - w1 - w2, h2,
441 color);
442}
443#endif
444
72ce053b
IC
445#define ALPHA_BLEND(a, oldp, newp, s)\
446((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
447
448#define RGBA_IN(r, g, b, a, s)\
449{\
450 unsigned int v = ((const uint32_t *)(s))[0];\
451 a = (v >> 24) & 0xff;\
452 r = (v >> 16) & 0xff;\
453 g = (v >> 8) & 0xff;\
454 b = v & 0xff;\
455}
456
457#define YUVA_IN(y, u, v, a, s, pal)\
458{\
57cf99f2 459 unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
IC
460 a = (val >> 24) & 0xff;\
461 y = (val >> 16) & 0xff;\
462 u = (val >> 8) & 0xff;\
463 v = val & 0xff;\
464}
465
466#define YUVA_OUT(d, y, u, v, a)\
467{\
468 ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
469}
470
471
472#define BPP 1
473
0a8cd696 474static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
IC
475{
476 int wrap, wrap3, width2, skip2;
477 int y, u, v, a, u1, v1, a1, w, h;
478 uint8_t *lum, *cb, *cr;
479 const uint8_t *p;
480 const uint32_t *pal;
9cb5a11e
RD
481 int dstx, dsty, dstw, dsth;
482
7cf9c6ae
MN
483 dstw = av_clip(rect->w, 0, imgw);
484 dsth = av_clip(rect->h, 0, imgh);
485 dstx = av_clip(rect->x, 0, imgw - dstw);
486 dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
RD
487 lum = dst->data[0] + dsty * dst->linesize[0];
488 cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
489 cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
490
f54b31b9 491 width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e 492 skip2 = dstx >> 1;
72ce053b 493 wrap = dst->linesize[0];
25b4c651
MN
494 wrap3 = rect->pict.linesize[0];
495 p = rect->pict.data[0];
496 pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */
115329f1 497
9cb5a11e
RD
498 if (dsty & 1) {
499 lum += dstx;
72ce053b
IC
500 cb += skip2;
501 cr += skip2;
115329f1 502
9cb5a11e 503 if (dstx & 1) {
72ce053b
IC
504 YUVA_IN(y, u, v, a, p, pal);
505 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
506 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
507 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
508 cb++;
509 cr++;
510 lum++;
511 p += BPP;
512 }
9cb5a11e 513 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
514 YUVA_IN(y, u, v, a, p, pal);
515 u1 = u;
516 v1 = v;
517 a1 = a;
518 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
519
520 YUVA_IN(y, u, v, a, p + BPP, pal);
521 u1 += u;
522 v1 += v;
523 a1 += a;
524 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
525 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
526 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
527 cb++;
528 cr++;
529 p += 2 * BPP;
530 lum += 2;
531 }
532 if (w) {
533 YUVA_IN(y, u, v, a, p, pal);
534 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
535 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
536 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
BA
537 p++;
538 lum++;
72ce053b 539 }
4606a059
BA
540 p += wrap3 - dstw * BPP;
541 lum += wrap - dstw - dstx;
72ce053b
IC
542 cb += dst->linesize[1] - width2 - skip2;
543 cr += dst->linesize[2] - width2 - skip2;
544 }
9cb5a11e
RD
545 for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
546 lum += dstx;
72ce053b
IC
547 cb += skip2;
548 cr += skip2;
115329f1 549
9cb5a11e 550 if (dstx & 1) {
72ce053b
IC
551 YUVA_IN(y, u, v, a, p, pal);
552 u1 = u;
553 v1 = v;
554 a1 = a;
555 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
556 p += wrap3;
557 lum += wrap;
558 YUVA_IN(y, u, v, a, p, pal);
559 u1 += u;
560 v1 += v;
561 a1 += a;
562 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
563 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
564 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
565 cb++;
566 cr++;
567 p += -wrap3 + BPP;
568 lum += -wrap + 1;
569 }
9cb5a11e 570 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
571 YUVA_IN(y, u, v, a, p, pal);
572 u1 = u;
573 v1 = v;
574 a1 = a;
575 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
576
f8ca63e8 577 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
578 u1 += u;
579 v1 += v;
580 a1 += a;
581 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
582 p += wrap3;
583 lum += wrap;
584
585 YUVA_IN(y, u, v, a, p, pal);
586 u1 += u;
587 v1 += v;
588 a1 += a;
589 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
590
f8ca63e8 591 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
592 u1 += u;
593 v1 += v;
594 a1 += a;
595 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
596
597 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
598 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
599
600 cb++;
601 cr++;
602 p += -wrap3 + 2 * BPP;
603 lum += -wrap + 2;
604 }
605 if (w) {
606 YUVA_IN(y, u, v, a, p, pal);
607 u1 = u;
608 v1 = v;
609 a1 = a;
610 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
611 p += wrap3;
612 lum += wrap;
613 YUVA_IN(y, u, v, a, p, pal);
614 u1 += u;
615 v1 += v;
616 a1 += a;
617 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
618 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
619 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
620 cb++;
621 cr++;
622 p += -wrap3 + BPP;
623 lum += -wrap + 1;
624 }
9cb5a11e
RD
625 p += wrap3 + (wrap3 - dstw * BPP);
626 lum += wrap + (wrap - dstw - dstx);
72ce053b
IC
627 cb += dst->linesize[1] - width2 - skip2;
628 cr += dst->linesize[2] - width2 - skip2;
629 }
630 /* handle odd height */
631 if (h) {
9cb5a11e 632 lum += dstx;
72ce053b
IC
633 cb += skip2;
634 cr += skip2;
115329f1 635
9cb5a11e 636 if (dstx & 1) {
72ce053b
IC
637 YUVA_IN(y, u, v, a, p, pal);
638 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
639 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
640 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
641 cb++;
642 cr++;
643 lum++;
644 p += BPP;
645 }
9cb5a11e 646 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
647 YUVA_IN(y, u, v, a, p, pal);
648 u1 = u;
649 v1 = v;
650 a1 = a;
651 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
652
653 YUVA_IN(y, u, v, a, p + BPP, pal);
654 u1 += u;
655 v1 += v;
656 a1 += a;
657 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
658 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
659 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
660 cb++;
661 cr++;
662 p += 2 * BPP;
663 lum += 2;
664 }
665 if (w) {
666 YUVA_IN(y, u, v, a, p, pal);
667 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
668 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
669 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
670 }
671 }
672}
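
/* Note on the blending above: subtitle rectangles are blended directly into
 * the YV12 overlay. ALPHA_BLEND mixes a/255 of the subtitle sample into the
 * existing luma sample, while the chroma planes are subsampled 2x2, so the
 * alpha is averaged (a1 >> 2) and the accumulated u1/v1 sums are normalized
 * through the shift parameter of ALPHA_BLEND. */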
673
674static void free_subpicture(SubPicture *sp)
675{
676 int i;
115329f1 677
72ce053b
IC
678 for (i = 0; i < sp->sub.num_rects; i++)
679 {
25b4c651
MN
680 av_freep(&sp->sub.rects[i]->pict.data[0]);
681 av_freep(&sp->sub.rects[i]->pict.data[1]);
db4fac64 682 av_freep(&sp->sub.rects[i]);
72ce053b 683 }
115329f1 684
72ce053b 685 av_free(sp->sub.rects);
115329f1 686
72ce053b
IC
687 memset(&sp->sub, 0, sizeof(AVSubtitle));
688}
689
01310af2
FB
690static void video_image_display(VideoState *is)
691{
692 VideoPicture *vp;
72ce053b
IC
693 SubPicture *sp;
694 AVPicture pict;
01310af2
FB
695 float aspect_ratio;
696 int width, height, x, y;
697 SDL_Rect rect;
72ce053b 698 int i;
01310af2
FB
699
700 vp = &is->pictq[is->pictq_rindex];
701 if (vp->bmp) {
917d2bb3
MN
702#if CONFIG_AVFILTER
703 if (vp->picref->pixel_aspect.num == 0)
704 aspect_ratio = 0;
705 else
706 aspect_ratio = av_q2d(vp->picref->pixel_aspect);
707#else
708
01310af2 709 /* XXX: use variable in the frame */
c30a4489
AJ
710 if (is->video_st->sample_aspect_ratio.num)
711 aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
712 else if (is->video_st->codec->sample_aspect_ratio.num)
713 aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
72ea344b 714 else
c30a4489 715 aspect_ratio = 0;
917d2bb3 716#endif
01310af2 717 if (aspect_ratio <= 0.0)
c30a4489 718 aspect_ratio = 1.0;
917d2bb3 719 aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
FB
720 /* if an active format is indicated, then it overrides the
721 mpeg format */
722#if 0
01f4895c
MN
723 if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
724 is->dtg_active_format = is->video_st->codec->dtg_active_format;
01310af2
FB
725 printf("dtg_active_format=%d\n", is->dtg_active_format);
726 }
727#endif
728#if 0
01f4895c 729 switch(is->video_st->codec->dtg_active_format) {
01310af2
FB
730 case FF_DTG_AFD_SAME:
731 default:
732 /* nothing to do */
733 break;
734 case FF_DTG_AFD_4_3:
735 aspect_ratio = 4.0 / 3.0;
736 break;
737 case FF_DTG_AFD_16_9:
738 aspect_ratio = 16.0 / 9.0;
739 break;
740 case FF_DTG_AFD_14_9:
741 aspect_ratio = 14.0 / 9.0;
742 break;
743 case FF_DTG_AFD_4_3_SP_14_9:
744 aspect_ratio = 14.0 / 9.0;
745 break;
746 case FF_DTG_AFD_16_9_SP_14_9:
747 aspect_ratio = 14.0 / 9.0;
748 break;
749 case FF_DTG_AFD_SP_4_3:
750 aspect_ratio = 4.0 / 3.0;
751 break;
752 }
753#endif
754
72ce053b
IC
755 if (is->subtitle_st)
756 {
757 if (is->subpq_size > 0)
758 {
759 sp = &is->subpq[is->subpq_rindex];
760
761 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
762 {
763 SDL_LockYUVOverlay (vp->bmp);
764
765 pict.data[0] = vp->bmp->pixels[0];
766 pict.data[1] = vp->bmp->pixels[2];
767 pict.data[2] = vp->bmp->pixels[1];
768
769 pict.linesize[0] = vp->bmp->pitches[0];
770 pict.linesize[1] = vp->bmp->pitches[2];
771 pict.linesize[2] = vp->bmp->pitches[1];
772
773 for (i = 0; i < sp->sub.num_rects; i++)
db4fac64 774 blend_subrect(&pict, sp->sub.rects[i],
0a8cd696 775 vp->bmp->w, vp->bmp->h);
72ce053b
IC
776
777 SDL_UnlockYUVOverlay (vp->bmp);
778 }
779 }
780 }
781
782
01310af2
FB
783 /* XXX: we suppose the screen has a 1.0 pixel ratio */
784 height = is->height;
bb6c34e5 785 width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
FB
786 if (width > is->width) {
787 width = is->width;
bb6c34e5 788 height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
FB
789 }
790 x = (is->width - width) / 2;
791 y = (is->height - height) / 2;
792 if (!is->no_background) {
793 /* fill the background */
794 // fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
795 } else {
796 is->no_background = 0;
797 }
798 rect.x = is->xleft + x;
2f6547fb 799 rect.y = is->ytop + y;
01310af2
FB
800 rect.w = width;
801 rect.h = height;
802 SDL_DisplayYUVOverlay(vp->bmp, &rect);
803 } else {
804#if 0
115329f1
DB
805 fill_rectangle(screen,
806 is->xleft, is->ytop, is->width, is->height,
01310af2
FB
807 QERGB(0x00, 0x00, 0x00));
808#endif
809 }
810}
811
812static inline int compute_mod(int a, int b)
813{
814 a = a % b;
115329f1 815 if (a >= 0)
01310af2
FB
816 return a;
817 else
818 return a + b;
819}
820
821static void video_audio_display(VideoState *s)
822{
823 int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
824 int ch, channels, h, h2, bgcolor, fgcolor;
825 int16_t time_diff;
4c7c7645
MN
826 int rdft_bits, nb_freq;
827
828 for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
829 ;
830 nb_freq= 1<<(rdft_bits-1);
115329f1 831
01310af2 832 /* compute display index : center on currently output samples */
01f4895c 833 channels = s->audio_st->codec->channels;
01310af2 834 nb_display_channels = channels;
5e0257e3 835 if (!s->paused) {
4c7c7645 836 int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
5e0257e3
FB
837 n = 2 * channels;
838 delay = audio_write_get_buf_size(s);
839 delay /= n;
115329f1 840
5e0257e3
FB
841 /* to be more precise, we take into account the time spent since
842 the last buffer computation */
843 if (audio_callback_time) {
844 time_diff = av_gettime() - audio_callback_time;
122dcdcb 845 delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
5e0257e3 846 }
115329f1 847
122dcdcb 848 delay += 2*data_used;
4c7c7645
MN
849 if (delay < data_used)
850 delay = data_used;
ac50bcc8
MN
851
852 i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
12eeda34 853 if(s->show_audio==1){
6c7165c7
JM
854 h= INT_MIN;
855 for(i=0; i<1000; i+=channels){
856 int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
857 int a= s->sample_array[idx];
858 int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
859 int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
860 int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
861 int score= a-d;
862 if(h<score && (b^c)<0){
863 h= score;
864 i_start= idx;
865 }
ac50bcc8
MN
866 }
867 }
868
5e0257e3
FB
869 s->last_i_start = i_start;
870 } else {
871 i_start = s->last_i_start;
01310af2
FB
872 }
873
01310af2 874 bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 875 if(s->show_audio==1){
6c7165c7
JM
876 fill_rectangle(screen,
877 s->xleft, s->ytop, s->width, s->height,
878 bgcolor);
879
880 fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
881
882 /* total height for one channel */
883 h = s->height / nb_display_channels;
884 /* graph height / 2 */
885 h2 = (h * 9) / 20;
886 for(ch = 0;ch < nb_display_channels; ch++) {
887 i = i_start + ch;
888 y1 = s->ytop + ch * h + (h / 2); /* position of center line */
889 for(x = 0; x < s->width; x++) {
890 y = (s->sample_array[i] * h2) >> 15;
891 if (y < 0) {
892 y = -y;
893 ys = y1 - y;
894 } else {
895 ys = y1;
896 }
897 fill_rectangle(screen,
898 s->xleft + x, ys, 1, y,
899 fgcolor);
900 i += channels;
901 if (i >= SAMPLE_ARRAY_SIZE)
902 i -= SAMPLE_ARRAY_SIZE;
01310af2 903 }
01310af2 904 }
01310af2 905
6c7165c7 906 fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2 907
6c7165c7
JM
908 for(ch = 1;ch < nb_display_channels; ch++) {
909 y = s->ytop + ch * h;
910 fill_rectangle(screen,
911 s->xleft, y, s->width, 1,
912 fgcolor);
913 }
914 SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
12eeda34 915 }else{
12eeda34 916 nb_display_channels= FFMIN(nb_display_channels, 2);
12eeda34 917 if(rdft_bits != s->rdft_bits){
166621ab
MR
918 av_rdft_end(s->rdft);
919 s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
12eeda34
MN
920 s->rdft_bits= rdft_bits;
921 }
12eeda34
MN
922 {
923 FFTSample data[2][2*nb_freq];
924 for(ch = 0;ch < nb_display_channels; ch++) {
925 i = i_start + ch;
926 for(x = 0; x < 2*nb_freq; x++) {
927 double w= (x-nb_freq)*(1.0/nb_freq);
928 data[ch][x]= s->sample_array[i]*(1.0-w*w);
929 i += channels;
930 if (i >= SAMPLE_ARRAY_SIZE)
931 i -= SAMPLE_ARRAY_SIZE;
932 }
166621ab 933 av_rdft_calc(s->rdft, data[ch]);
12eeda34
MN
934 }
935 //least efficient way to do this, we should of course directly access it but its more than fast enough
092421cf 936 for(y=0; y<s->height; y++){
12eeda34
MN
937 double w= 1/sqrt(nb_freq);
938 int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
939 int b= sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0] + data[1][2*y+1]*data[1][2*y+1]));
940 a= FFMIN(a,255);
941 b= FFMIN(b,255);
942 fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);
943
944 fill_rectangle(screen,
945 s->xpos, s->height-y, 1, 1,
946 fgcolor);
947 }
948 }
949 SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
950 s->xpos++;
951 if(s->xpos >= s->width)
952 s->xpos= s->xleft;
953 }
01310af2
FB
954}
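
/* The two display modes above: show_audio == 1 draws the raw waveform of the
 * most recently played samples, starting from an index chosen by a small
 * search so successive refreshes stay roughly aligned; any other value is the
 * RDFT mode, which applies a parabolic (Welch-style) window to the samples,
 * runs av_rdft_calc(), and plots one spectrum column per refresh, advancing
 * xpos across the window. */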
955
990c8438
MN
956static int video_open(VideoState *is){
957 int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
958 int w,h;
959
fb84155b
MN
960 if(is_full_screen) flags |= SDL_FULLSCREEN;
961 else flags |= SDL_RESIZABLE;
962
990c8438
MN
963 if (is_full_screen && fs_screen_width) {
964 w = fs_screen_width;
965 h = fs_screen_height;
fb84155b
MN
966 } else if(!is_full_screen && screen_width){
967 w = screen_width;
968 h = screen_height;
917d2bb3
MN
969#if CONFIG_AVFILTER
970 }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
971 w = is->out_video_filter->inputs[0]->w;
972 h = is->out_video_filter->inputs[0]->h;
973#else
fb84155b
MN
974 }else if (is->video_st && is->video_st->codec->width){
975 w = is->video_st->codec->width;
976 h = is->video_st->codec->height;
917d2bb3 977#endif
990c8438 978 } else {
fb84155b
MN
979 w = 640;
980 h = 480;
990c8438 981 }
d3d7b12e
MN
982 if(screen && is->width == screen->w && screen->w == w
983 && is->height== screen->h && screen->h == h)
984 return 0;
985
c97f5402 986#ifndef __APPLE__
990c8438
MN
987 screen = SDL_SetVideoMode(w, h, 0, flags);
988#else
989 /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
990 screen = SDL_SetVideoMode(w, h, 24, flags);
991#endif
992 if (!screen) {
993 fprintf(stderr, "SDL: could not set video mode - exiting\n");
994 return -1;
995 }
996 SDL_WM_SetCaption("FFplay", "FFplay");
997
998 is->width = screen->w;
999 is->height = screen->h;
1000
1001 return 0;
1002}
8c982c5d 1003
01310af2
FB
1004/* display the current picture, if any */
1005static void video_display(VideoState *is)
1006{
8c982c5d
MN
1007 if(!screen)
1008 video_open(cur_stream);
115329f1 1009 if (is->audio_st && is->show_audio)
01310af2
FB
1010 video_audio_display(is);
1011 else if (is->video_st)
1012 video_image_display(is);
1013}
1014
d38c9e7a 1015static int refresh_thread(void *opaque)
01310af2 1016{
d38c9e7a
MN
1017 VideoState *is= opaque;
1018 while(!is->abort_request){
01310af2
FB
1019 SDL_Event event;
1020 event.type = FF_REFRESH_EVENT;
1021 event.user.data1 = opaque;
d38c9e7a
MN
1022 if(!is->refresh){
1023 is->refresh=1;
01310af2 1024 SDL_PushEvent(&event);
d38c9e7a 1025 }
2b3da32f 1026 usleep(is->audio_st && is->show_audio ? rdftspeed*1000 : 5000); //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
d38c9e7a
MN
1027 }
1028 return 0;
01310af2
FB
1029}
1030
/* get the current audio clock value */
static double get_audio_clock(VideoState *is)
{
    double pts;
    int hw_buf_size, bytes_per_sec;
    pts = is->audio_clock;
    hw_buf_size = audio_write_get_buf_size(is);
    bytes_per_sec = 0;
    if (is->audio_st) {
        bytes_per_sec = is->audio_st->codec->sample_rate *
            2 * is->audio_st->codec->channels;
    }
    if (bytes_per_sec)
        pts -= (double)hw_buf_size / bytes_per_sec;
    return pts;
}
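
/* Worked example (illustrative numbers only): with 44100 Hz stereo 16-bit
 * audio, bytes_per_sec = 44100 * 2 * 2 = 176400. If 8192 bytes are still
 * queued for the audio device, the clock is pulled back by
 * 8192 / 176400 ~= 46 ms, i.e. it reports the time of the sample currently
 * being played rather than of the last sample handed to the driver. */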

/* get the current video clock value */
static double get_video_clock(VideoState *is)
{
    if (is->paused) {
        return is->video_current_pts;
    } else {
        return is->video_current_pts_drift + av_gettime() / 1000000.0;
    }
}

/* get the current external clock value */
static double get_external_clock(VideoState *is)
{
    int64_t ti;
    ti = av_gettime();
    return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
}

/* get the current master clock value */
static double get_master_clock(VideoState *is)
{
    double val;

    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
        if (is->video_st)
            val = get_video_clock(is);
        else
            val = get_audio_clock(is);
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
        if (is->audio_st)
            val = get_audio_clock(is);
        else
            val = get_video_clock(is);
    } else {
        val = get_external_clock(is);
    }
    return val;
}
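
/* Note: if the stream selected as master is not present, the code above
 * falls back to the other stream's clock; the external clock is used only
 * when explicitly selected. */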
1086
72ea344b 1087/* seek in the stream */
2ef46053 1088static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b 1089{
687fae2b
IW
1090 if (!is->seek_req) {
1091 is->seek_pos = pos;
4ed29207 1092 is->seek_rel = rel;
3890dd3a 1093 is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
SH
1094 if (seek_by_bytes)
1095 is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
IW
1096 is->seek_req = 1;
1097 }
72ea344b
FB
1098}
1099
1100/* pause or resume the video */
1101static void stream_pause(VideoState *is)
1102{
68aefbe8
MN
1103 if (is->paused) {
1104 is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
f5668147 1105 if(is->read_pause_return != AVERROR(ENOSYS)){
68aefbe8 1106 is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147 1107 }
68aefbe8 1108 is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b 1109 }
68aefbe8 1110 is->paused = !is->paused;
72ea344b
FB
1111}

static double compute_target_time(double frame_current_pts, VideoState *is)
{
    double delay, sync_threshold, diff;

    /* compute nominal delay */
    delay = frame_current_pts - is->frame_last_pts;
    if (delay <= 0 || delay >= 10.0) {
        /* if incorrect delay, use previous one */
        delay = is->frame_last_delay;
    } else {
        is->frame_last_delay = delay;
    }
    is->frame_last_pts = frame_current_pts;

    /* update delay to follow master synchronisation source */
    if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
         is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
        /* if video is slave, we try to correct big delays by
           duplicating or deleting a frame */
        diff = get_video_clock(is) - get_master_clock(is);

        /* skip or repeat frame. We take into account the
           delay to compute the threshold. I still don't know
           if it is the best guess */
        sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
        if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
            if (diff <= -sync_threshold)
                delay = 0;
            else if (diff >= sync_threshold)
                delay = 2 * delay;
        }
    }
    is->frame_timer += delay;
#if defined(DEBUG_SYNC)
    printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
           delay, actual_delay, frame_current_pts, -diff);
#endif

    return is->frame_timer;
}
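
/* Example of the rule above (illustrative): at 25 fps the nominal delay is
 * 0.040 s, so sync_threshold = max(0.01, 0.040) = 0.040 s. If the video clock
 * is more than 40 ms behind the master clock, delay becomes 0 and the frame
 * is shown immediately; if it is more than 40 ms ahead, the delay is doubled
 * to 80 ms so the master can catch up. Differences beyond AV_NOSYNC_THRESHOLD
 * (10 s) are left uncorrected here. */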
1153
01310af2
FB
1154/* called to display each frame */
1155static void video_refresh_timer(void *opaque)
1156{
1157 VideoState *is = opaque;
1158 VideoPicture *vp;
638c9d91 1159
72ce053b 1160 SubPicture *sp, *sp2;
01310af2
FB
1161
1162 if (is->video_st) {
d38c9e7a 1163retry:
01310af2 1164 if (is->pictq_size == 0) {
d38c9e7a 1165 //nothing to do, no picture to display in the queue
01310af2 1166 } else {
d38c9e7a
MN
1167 double time= av_gettime()/1000000.0;
1168 double next_target;
638c9d91 1169 /* dequeue the picture */
01310af2 1170 vp = &is->pictq[is->pictq_rindex];
638c9d91 1171
d38c9e7a
MN
1172 if(time < vp->target_clock)
1173 return;
638c9d91
FB
1174 /* update current video pts */
1175 is->video_current_pts = vp->pts;
d38c9e7a 1176 is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160 1177 is->video_current_pos = vp->pos;
d38c9e7a
MN
1178 if(is->pictq_size > 1){
1179 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
1180 assert(nextvp->target_clock >= vp->target_clock);
1181 next_target= nextvp->target_clock;
1182 }else{
1183 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
1184 }
1185 if(framedrop && time > next_target){
1186 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
1187 if(is->pictq_size > 1 || time > next_target + 0.5){
1188 /* update queue size and signal for next picture */
1189 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1190 is->pictq_rindex = 0;
1191
1192 SDL_LockMutex(is->pictq_mutex);
1193 is->pictq_size--;
1194 SDL_CondSignal(is->pictq_cond);
1195 SDL_UnlockMutex(is->pictq_mutex);
1196 goto retry;
1197 }
1198 }
638c9d91 1199
72ce053b
IC
1200 if(is->subtitle_st) {
1201 if (is->subtitle_stream_changed) {
1202 SDL_LockMutex(is->subpq_mutex);
115329f1 1203
72ce053b
IC
1204 while (is->subpq_size) {
1205 free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1 1206
72ce053b
IC
1207 /* update queue size and signal for next picture */
1208 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1209 is->subpq_rindex = 0;
115329f1 1210
72ce053b
IC
1211 is->subpq_size--;
1212 }
1213 is->subtitle_stream_changed = 0;
1214
1215 SDL_CondSignal(is->subpq_cond);
1216 SDL_UnlockMutex(is->subpq_mutex);
1217 } else {
1218 if (is->subpq_size > 0) {
1219 sp = &is->subpq[is->subpq_rindex];
1220
1221 if (is->subpq_size > 1)
1222 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1223 else
1224 sp2 = NULL;
1225
1226 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1227 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1228 {
1229 free_subpicture(sp);
1230
1231 /* update queue size and signal for next picture */
1232 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1233 is->subpq_rindex = 0;
1234
1235 SDL_LockMutex(is->subpq_mutex);
1236 is->subpq_size--;
1237 SDL_CondSignal(is->subpq_cond);
1238 SDL_UnlockMutex(is->subpq_mutex);
1239 }
1240 }
1241 }
1242 }
1243
01310af2
FB
1244 /* display picture */
1245 video_display(is);
115329f1 1246
01310af2
FB
1247 /* update queue size and signal for next picture */
1248 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1249 is->pictq_rindex = 0;
115329f1 1250
01310af2
FB
1251 SDL_LockMutex(is->pictq_mutex);
1252 is->pictq_size--;
1253 SDL_CondSignal(is->pictq_cond);
1254 SDL_UnlockMutex(is->pictq_mutex);
1255 }
1256 } else if (is->audio_st) {
1257 /* draw the next audio frame */
1258
01310af2
FB
1259 /* if only audio stream, then display the audio bars (better
1260 than nothing, just to test the implementation */
115329f1 1261
01310af2
FB
1262 /* display picture */
1263 video_display(is);
01310af2
FB
1264 }
1265 if (show_status) {
1266 static int64_t last_time;
1267 int64_t cur_time;
72ce053b 1268 int aqsize, vqsize, sqsize;
638c9d91 1269 double av_diff;
115329f1 1270
01310af2 1271 cur_time = av_gettime();
1e1a0b18 1272 if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
FB
1273 aqsize = 0;
1274 vqsize = 0;
72ce053b 1275 sqsize = 0;
01310af2
FB
1276 if (is->audio_st)
1277 aqsize = is->audioq.size;
1278 if (is->video_st)
1279 vqsize = is->videoq.size;
72ce053b
IC
1280 if (is->subtitle_st)
1281 sqsize = is->subtitleq.size;
638c9d91
FB
1282 av_diff = 0;
1283 if (is->audio_st && is->video_st)
1284 av_diff = get_audio_clock(is) - get_video_clock(is);
382f3a5b
MN
1285 printf("%7.2f A-V:%7.3f s:%3.1f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
1286 get_master_clock(is), av_diff, FFMAX(is->skip_frames-1, 0), aqsize / 1024, vqsize / 1024, sqsize, is->faulty_dts, is->faulty_pts);
01310af2
FB
1287 fflush(stdout);
1288 last_time = cur_time;
1289 }
1290 }
1291}
1292
1293/* allocate a picture (needs to do that in main thread to avoid
1294 potential locking problems */
1295static void alloc_picture(void *opaque)
1296{
1297 VideoState *is = opaque;
1298 VideoPicture *vp;
01310af2
FB
1299
1300 vp = &is->pictq[is->pictq_windex];
1301
1302 if (vp->bmp)
1303 SDL_FreeYUVOverlay(vp->bmp);
1304
917d2bb3
MN
1305#if CONFIG_AVFILTER
1306 if (vp->picref)
1307 avfilter_unref_pic(vp->picref);
1308 vp->picref = NULL;
1309
1310 vp->width = is->out_video_filter->inputs[0]->w;
1311 vp->height = is->out_video_filter->inputs[0]->h;
1312 vp->pix_fmt = is->out_video_filter->inputs[0]->format;
1313#else
1314 vp->width = is->video_st->codec->width;
1315 vp->height = is->video_st->codec->height;
1316 vp->pix_fmt = is->video_st->codec->pix_fmt;
1317#endif
1318
1319 vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1 1320 SDL_YV12_OVERLAY,
61890b02 1321 screen);
01310af2
FB
1322
1323 SDL_LockMutex(is->pictq_mutex);
1324 vp->allocated = 1;
1325 SDL_CondSignal(is->pictq_cond);
1326 SDL_UnlockMutex(is->pictq_mutex);
1327}
1328
267e9dfa
MN
1329/**
1330 *
1331 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1332 */
1a620dd7 1333static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
01310af2
FB
1334{
1335 VideoPicture *vp;
1336 int dst_pix_fmt;
917d2bb3
MN
1337#if CONFIG_AVFILTER
1338 AVPicture pict_src;
1339#endif
01310af2
FB
1340 /* wait until we have space to put a new picture */
1341 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1342
1343 if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
1344 is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
1345
01310af2
FB
1346 while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1347 !is->videoq.abort_request) {
1348 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1349 }
1350 SDL_UnlockMutex(is->pictq_mutex);
115329f1 1351
01310af2
FB
1352 if (is->videoq.abort_request)
1353 return -1;
1354
1355 vp = &is->pictq[is->pictq_windex];
1356
1357 /* alloc or resize hardware picture buffer */
115329f1 1358 if (!vp->bmp ||
917d2bb3
MN
1359#if CONFIG_AVFILTER
1360 vp->width != is->out_video_filter->inputs[0]->w ||
1361 vp->height != is->out_video_filter->inputs[0]->h) {
1362#else
01f4895c
MN
1363 vp->width != is->video_st->codec->width ||
1364 vp->height != is->video_st->codec->height) {
917d2bb3 1365#endif
01310af2
FB
1366 SDL_Event event;
1367
1368 vp->allocated = 0;
1369
1370 /* the allocation must be done in the main thread to avoid
1371 locking problems */
1372 event.type = FF_ALLOC_EVENT;
1373 event.user.data1 = is;
1374 SDL_PushEvent(&event);
115329f1 1375
01310af2
FB
1376 /* wait until the picture is allocated */
1377 SDL_LockMutex(is->pictq_mutex);
1378 while (!vp->allocated && !is->videoq.abort_request) {
1379 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1380 }
1381 SDL_UnlockMutex(is->pictq_mutex);
1382
1383 if (is->videoq.abort_request)
1384 return -1;
1385 }
1386
638c9d91 1387 /* if the frame is not skipped, then display it */
01310af2 1388 if (vp->bmp) {
fbf1b885 1389 AVPicture pict;
917d2bb3
MN
1390#if CONFIG_AVFILTER
1391 if(vp->picref)
1392 avfilter_unref_pic(vp->picref);
1393 vp->picref = src_frame->opaque;
1394#endif
fbf1b885 1395
01310af2
FB
1396 /* get a pointer on the bitmap */
1397 SDL_LockYUVOverlay (vp->bmp);
1398
1399 dst_pix_fmt = PIX_FMT_YUV420P;
fbf1b885 1400 memset(&pict,0,sizeof(AVPicture));
01310af2
FB
1401 pict.data[0] = vp->bmp->pixels[0];
1402 pict.data[1] = vp->bmp->pixels[2];
1403 pict.data[2] = vp->bmp->pixels[1];
1404
1405 pict.linesize[0] = vp->bmp->pitches[0];
1406 pict.linesize[1] = vp->bmp->pitches[2];
1407 pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
MN
1408
1409#if CONFIG_AVFILTER
1410 pict_src.data[0] = src_frame->data[0];
1411 pict_src.data[1] = src_frame->data[1];
1412 pict_src.data[2] = src_frame->data[2];
1413
1414 pict_src.linesize[0] = src_frame->linesize[0];
1415 pict_src.linesize[1] = src_frame->linesize[1];
1416 pict_src.linesize[2] = src_frame->linesize[2];
1417
1418 //FIXME use direct rendering
1419 av_picture_copy(&pict, &pict_src,
1420 vp->pix_fmt, vp->width, vp->height);
1421#else
e43d7a18 1422 sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28 1423 is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3 1424 vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
feb7bc67 1425 dst_pix_fmt, sws_flags, NULL, NULL, NULL);
3ac56e28 1426 if (is->img_convert_ctx == NULL) {
26ba8235
AB
1427 fprintf(stderr, "Cannot initialize the conversion context\n");
1428 exit(1);
1429 }
3ac56e28 1430 sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
MN
1431 0, vp->height, pict.data, pict.linesize);
1432#endif
01310af2
FB
1433 /* update the bitmap content */
1434 SDL_UnlockYUVOverlay(vp->bmp);
1435
638c9d91 1436 vp->pts = pts;
1a620dd7 1437 vp->pos = pos;
01310af2
FB
1438
1439 /* now we can update the picture count */
1440 if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1441 is->pictq_windex = 0;
1442 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1443 vp->target_clock= compute_target_time(vp->pts, is);
1444
01310af2
FB
1445 is->pictq_size++;
1446 SDL_UnlockMutex(is->pictq_mutex);
1447 }
638c9d91
FB
1448 return 0;
1449}

/**
 * compute the exact PTS for the picture if it is omitted in the stream
 * @param pts1 the dts of the pkt / pts of the frame
 */
static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
{
    double frame_delay, pts;

    pts = pts1;

    if (pts != 0) {
        /* update video clock with pts, if present */
        is->video_clock = pts;
    } else {
        pts = is->video_clock;
    }
    /* update video clock for next frame */
    frame_delay = av_q2d(is->video_st->codec->time_base);
    /* for MPEG2, the frame can be repeated, so we update the
       clock accordingly */
    frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
    is->video_clock += frame_delay;

#if defined(DEBUG_SYNC) && 0
    printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
           av_get_pict_type_char(src_frame->pict_type), pts, pts1);
#endif
    return queue_picture(is, src_frame, pts, pos);
}
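
/* Worked example (illustrative): with a 25 fps stream the time_base is 1/25,
 * so frame_delay = 0.040 s. A frame with repeat_pict == 1 (e.g. soft
 * telecine) adds half a frame period (0.020 s), so the predicted video clock
 * for the next frame advances by 0.060 s when that frame carries no PTS of
 * its own. */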
1480
3966a574 1481static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2 1482{
6c7d3ead 1483 int len1, got_picture, i;
01310af2 1484
01310af2 1485 if (packet_queue_get(&is->videoq, pkt, 1) < 0)
917d2bb3 1486 return -1;
39c6a118
MN
1487
1488 if(pkt->data == flush_pkt.data){
1489 avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
MN
1490
1491 SDL_LockMutex(is->pictq_mutex);
1492 //Make sure there are no long delay timers (ideally we should just flush the queue but that's harder)
1493 for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
d38c9e7a 1494 is->pictq[i].target_clock= 0;
6c7d3ead
MN
1495 }
1496 while (is->pictq_size && !is->videoq.abort_request) {
1497 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1498 }
1a620dd7 1499 is->video_current_pos= -1;
6c7d3ead
MN
1500 SDL_UnlockMutex(is->pictq_mutex);
1501
41db429d
MN
1502 is->last_dts_for_fault_detection=
1503 is->last_pts_for_fault_detection= INT64_MIN;
967030eb 1504 is->frame_last_pts= AV_NOPTS_VALUE;
f7119e42 1505 is->frame_last_delay = 0;
b25453bd 1506 is->frame_timer = (double)av_gettime() / 1000000.0;
d38c9e7a
MN
1507 is->skip_frames= 1;
1508 is->skip_frames_index= 0;
917d2bb3 1509 return 0;
39c6a118
MN
1510 }
1511
638c9d91
FB
1512 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1513 this packet, if any */
7fb262b5 1514 is->video_st->codec->reordered_opaque= pkt->pts;
bea18375 1515 len1 = avcodec_decode_video2(is->video_st->codec,
620e8baf 1516 frame, &got_picture,
bea18375 1517 pkt);
620e8baf 1518
99e0b12b 1519 if (got_picture) {
df7d6e48
SS
1520 if(pkt->dts != AV_NOPTS_VALUE){
1521 is->faulty_dts += pkt->dts <= is->last_dts_for_fault_detection;
1522 is->last_dts_for_fault_detection= pkt->dts;
1523 }
1524 if(frame->reordered_opaque != AV_NOPTS_VALUE){
1525 is->faulty_pts += frame->reordered_opaque <= is->last_pts_for_fault_detection;
1526 is->last_pts_for_fault_detection= frame->reordered_opaque;
1527 }
99e0b12b 1528 }
41db429d
MN
1529
1530 if( ( decoder_reorder_pts==1
ecbed31c 1531 || (decoder_reorder_pts && is->faulty_pts<is->faulty_dts)
41db429d 1532 || pkt->dts == AV_NOPTS_VALUE)
7fb262b5 1533 && frame->reordered_opaque != AV_NOPTS_VALUE)
917d2bb3 1534 *pts= frame->reordered_opaque;
620e8baf 1535 else if(pkt->dts != AV_NOPTS_VALUE)
917d2bb3 1536 *pts= pkt->dts;
620e8baf 1537 else
917d2bb3
MN
1538 *pts= 0;
1539
fb966f99
MN
1540// if (len1 < 0)
1541// break;
d38c9e7a
MN
1542 if (got_picture){
1543 is->skip_frames_index += 1;
1544 if(is->skip_frames_index >= is->skip_frames){
1545 is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
1546 return 1;
1547 }
1548
1549 }
917d2bb3
MN
1550 return 0;
1551}
1552
1553#if CONFIG_AVFILTER
1554typedef struct {
1555 VideoState *is;
1556 AVFrame *frame;
1557} FilterPriv;
1558
1559static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
1560{
1561 FilterPriv *priv = ctx->priv;
1562 if(!opaque) return -1;
1563
1564 priv->is = opaque;
1565 priv->frame = avcodec_alloc_frame();
1566
1567 return 0;
1568}
1569
1570static void input_uninit(AVFilterContext *ctx)
1571{
1572 FilterPriv *priv = ctx->priv;
1573 av_free(priv->frame);
1574}
1575
static int input_request_frame(AVFilterLink *link)
{
    FilterPriv *priv = link->src->priv;
    AVFilterPicRef *picref;
    int64_t pts = 0;
    AVPacket pkt;
    int ret;

    while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
        av_free_packet(&pkt);
    if (ret < 0)
        return -1;

    /* FIXME: until I figure out how to hook everything up to the codec
     * right, we're just copying the entire frame. */
    picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
    av_picture_copy((AVPicture *)&picref->data, (AVPicture *)priv->frame,
                    picref->pic->format, link->w, link->h);
    av_free_packet(&pkt);

    picref->pts = pts;
    picref->pos = pkt.pos;
    picref->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
    avfilter_start_frame(link, avfilter_ref_pic(picref, ~0));
    avfilter_draw_slice(link, 0, link->h, 1);
    avfilter_end_frame(link);
    avfilter_unref_pic(picref);

    return 0;
}
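
/* This is where the packet byte position enters the filter chain: pkt.pos is
 * stored in picref->pos, travels with the picture reference through the
 * graph, and is read back in get_filtered_video_frame() so that
 * output_picture2()/queue_picture() can record it as vp->pos and
 * video_refresh_timer() can keep is->video_current_pos up to date. */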
1606
1607static int input_query_formats(AVFilterContext *ctx)
1608{
1609 FilterPriv *priv = ctx->priv;
1610 enum PixelFormat pix_fmts[] = {
1611 priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
1612 };
1613
1614 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1615 return 0;
1616}
1617
1618static int input_config_props(AVFilterLink *link)
1619{
1620 FilterPriv *priv = link->src->priv;
1621 AVCodecContext *c = priv->is->video_st->codec;
1622
1623 link->w = c->width;
1624 link->h = c->height;
1625
1626 return 0;
1627}
1628
1629static AVFilter input_filter =
1630{
1631 .name = "ffplay_input",
1632
1633 .priv_size = sizeof(FilterPriv),
1634
1635 .init = input_init,
1636 .uninit = input_uninit,
1637
1638 .query_formats = input_query_formats,
1639
1640 .inputs = (AVFilterPad[]) {{ .name = NULL }},
1641 .outputs = (AVFilterPad[]) {{ .name = "default",
1642 .type = CODEC_TYPE_VIDEO,
1643 .request_frame = input_request_frame,
1644 .config_props = input_config_props, },
1645 { .name = NULL }},
1646};
1647
1648static void output_end_frame(AVFilterLink *link)
1649{
1650}
1651
1652static int output_query_formats(AVFilterContext *ctx)
1653{
1654 enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
1655
1656 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1657 return 0;
1658}
1659
1660static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
bb409513 1661 int64_t *pts, int64_t *pos)
917d2bb3
MN
1662{
1663 AVFilterPicRef *pic;
1664
1665 if(avfilter_request_frame(ctx->inputs[0]))
1666 return -1;
1667 if(!(pic = ctx->inputs[0]->cur_pic))
1668 return -1;
1669 ctx->inputs[0]->cur_pic = NULL;
1670
1671 frame->opaque = pic;
1672 *pts = pic->pts;
bb409513 1673 *pos = pic->pos;
917d2bb3
MN
1674
1675 memcpy(frame->data, pic->data, sizeof(frame->data));
1676 memcpy(frame->linesize, pic->linesize, sizeof(frame->linesize));
1677
1678 return 1;
1679}
1680
1681static AVFilter output_filter =
1682{
1683 .name = "ffplay_output",
1684
1685 .query_formats = output_query_formats,
1686
1687 .inputs = (AVFilterPad[]) {{ .name = "default",
1688 .type = CODEC_TYPE_VIDEO,
1689 .end_frame = output_end_frame,
1690 .min_perms = AV_PERM_READ, },
1691 { .name = NULL }},
1692 .outputs = (AVFilterPad[]) {{ .name = NULL }},
1693};
1694#endif /* CONFIG_AVFILTER */
1695
1696static int video_thread(void *arg)
1697{
1698 VideoState *is = arg;
1699 AVFrame *frame= avcodec_alloc_frame();
bb409513 1700 int64_t pts_int, pos;
917d2bb3
MN
1701 double pts;
1702 int ret;
1703
1704#if CONFIG_AVFILTER
1705 AVFilterContext *filt_src = NULL, *filt_out = NULL;
1706 AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
1707 graph->scale_sws_opts = av_strdup("sws_flags=bilinear");
1708
1709 if(!(filt_src = avfilter_open(&input_filter, "src"))) goto the_end;
1710 if(!(filt_out = avfilter_open(&output_filter, "out"))) goto the_end;
1711
1712 if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
1713 if(avfilter_init_filter(filt_out, NULL, frame)) goto the_end;
1714
1715
1716 if(vfilters) {
1717 AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
1718 AVFilterInOut *inputs = av_malloc(sizeof(AVFilterInOut));
1719
1720 outputs->name = av_strdup("in");
1721 outputs->filter = filt_src;
1722 outputs->pad_idx = 0;
1723 outputs->next = NULL;
1724
1725 inputs->name = av_strdup("out");
1726 inputs->filter = filt_out;
1727 inputs->pad_idx = 0;
1728 inputs->next = NULL;
1729
1730 if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
1731 goto the_end;
1732 av_freep(&vfilters);
1733 } else {
1734 if(avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
1735 }
1736 avfilter_graph_add_filter(graph, filt_src);
1737 avfilter_graph_add_filter(graph, filt_out);
1738
1739 if(avfilter_graph_check_validity(graph, NULL)) goto the_end;
1740 if(avfilter_graph_config_formats(graph, NULL)) goto the_end;
1741 if(avfilter_graph_config_links(graph, NULL)) goto the_end;
1742
1743 is->out_video_filter = filt_out;
1744#endif
1745
1746 for(;;) {
1747#if !CONFIG_AVFILTER
1748 AVPacket pkt;
1749#endif
1750 while (is->paused && !is->videoq.abort_request)
1751 SDL_Delay(10);
1752#if CONFIG_AVFILTER
bb409513 1753 ret = get_filtered_video_frame(filt_out, frame, &pts_int, &pos);
917d2bb3
MN
1754#else
1755 ret = get_video_frame(is, frame, &pts_int, &pkt);
1756#endif
1757
1758 if (ret < 0) goto the_end;
1759
1760 if (!ret)
1761 continue;
1762
3966a574 1763 pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
MN
1764
1765#if CONFIG_AVFILTER
bb409513 1766 ret = output_picture2(is, frame, pts, pos);
917d2bb3 1767#else
fca62599 1768 ret = output_picture2(is, frame, pts, pkt.pos);
917d2bb3
MN
1769 av_free_packet(&pkt);
1770#endif
1771 if (ret < 0)
1772 goto the_end;
1773
115329f1 1774 if (step)
bba04f1e
WH
1775 if (cur_stream)
1776 stream_pause(cur_stream);
01310af2
FB
1777 }
1778 the_end:
917d2bb3
MN
1779#if CONFIG_AVFILTER
1780 avfilter_graph_destroy(graph);
1781 av_freep(&graph);
1782#endif
c6b1edc9 1783 av_free(frame);
01310af2
FB
1784 return 0;
1785}
1786
72ce053b
IC
1787static int subtitle_thread(void *arg)
1788{
1789 VideoState *is = arg;
1790 SubPicture *sp;
1791 AVPacket pkt1, *pkt = &pkt1;
1792 int len1, got_subtitle;
1793 double pts;
1794 int i, j;
1795 int r, g, b, y, u, v, a;
1796
1797 for(;;) {
1798 while (is->paused && !is->subtitleq.abort_request) {
1799 SDL_Delay(10);
1800 }
1801 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1802 break;
115329f1 1803
39c6a118
MN
1804 if(pkt->data == flush_pkt.data){
1805 avcodec_flush_buffers(is->subtitle_st->codec);
1806 continue;
1807 }
72ce053b
IC
1808 SDL_LockMutex(is->subpq_mutex);
1809 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1810 !is->subtitleq.abort_request) {
1811 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1812 }
1813 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1814
72ce053b
IC
1815 if (is->subtitleq.abort_request)
1816 goto the_end;
115329f1 1817
72ce053b
IC
1818 sp = &is->subpq[is->subpq_windex];
1819
1820 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1821 this packet, if any */
1822 pts = 0;
1823 if (pkt->pts != AV_NOPTS_VALUE)
1824 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1825
bea18375 1826 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1827 &sp->sub, &got_subtitle,
bea18375 1828 pkt);
72ce053b
IC
1829// if (len1 < 0)
1830// break;
1831 if (got_subtitle && sp->sub.format == 0) {
1832 sp->pts = pts;
115329f1 1833
72ce053b
IC
1834 for (i = 0; i < sp->sub.num_rects; i++)
1835 {
db4fac64 1836 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1837 {
25b4c651 1838 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1839 y = RGB_TO_Y_CCIR(r, g, b);
1840 u = RGB_TO_U_CCIR(r, g, b, 0);
1841 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1842 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1843 }
1844 }
1845
1846 /* now we can update the picture count */
1847 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1848 is->subpq_windex = 0;
1849 SDL_LockMutex(is->subpq_mutex);
1850 is->subpq_size++;
1851 SDL_UnlockMutex(is->subpq_mutex);
1852 }
1853 av_free_packet(pkt);
115329f1 1854// if (step)
72ce053b
IC
1855// if (cur_stream)
1856// stream_pause(cur_stream);
1857 }
1858 the_end:
1859 return 0;
1860}
1861
01310af2
FB
 1862/* copy samples for viewing in the audio visualization display */
1863static void update_sample_display(VideoState *is, short *samples, int samples_size)
1864{
1865 int size, len, channels;
1866
01f4895c 1867 channels = is->audio_st->codec->channels;
01310af2
FB
1868
1869 size = samples_size / sizeof(short);
1870 while (size > 0) {
1871 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1872 if (len > size)
1873 len = size;
1874 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1875 samples += len;
1876 is->sample_array_index += len;
1877 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1878 is->sample_array_index = 0;
1879 size -= len;
1880 }
1881}
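/*
 * Illustration, not part of ffplay.c: sample_array is used as a ring buffer
 * of SAMPLE_ARRAY_SIZE (2*65536 = 131072) shorts.  Writing 100000 samples
 * while sample_array_index == 100000 therefore takes two passes:
 *
 *     first  pass: len = 131072 - 100000 = 31072 samples, index wraps to 0
 *     second pass: len = 100000 - 31072  = 68928 samples, index ends at 68928
 */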
1882
01310af2
FB
 1883/* return the new audio buffer size (samples can be added or deleted
 1884 to get better sync if the master clock is the video or an external clock) */
115329f1 1885static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1886 int samples_size1, double pts)
01310af2 1887{
638c9d91 1888 int n, samples_size;
01310af2 1889 double ref_clock;
115329f1 1890
01f4895c 1891 n = 2 * is->audio_st->codec->channels;
638c9d91 1892 samples_size = samples_size1;
01310af2 1893
01310af2 1894 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 1895 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
1896 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1897 double diff, avg_diff;
01310af2 1898 int wanted_size, min_size, max_size, nb_samples;
115329f1 1899
638c9d91
FB
1900 ref_clock = get_master_clock(is);
1901 diff = get_audio_clock(is) - ref_clock;
115329f1 1902
638c9d91
FB
1903 if (diff < AV_NOSYNC_THRESHOLD) {
1904 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1905 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1906 /* not enough measures to have a correct estimate */
1907 is->audio_diff_avg_count++;
1908 } else {
1909 /* estimate the A-V difference */
1910 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1911
1912 if (fabs(avg_diff) >= is->audio_diff_threshold) {
01f4895c 1913 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 1914 nb_samples = samples_size / n;
115329f1 1915
638c9d91
FB
1916 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1917 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1918 if (wanted_size < min_size)
1919 wanted_size = min_size;
1920 else if (wanted_size > max_size)
1921 wanted_size = max_size;
115329f1 1922
638c9d91
FB
 1923 /* add or remove samples to correct the sync */
1924 if (wanted_size < samples_size) {
1925 /* remove samples */
1926 samples_size = wanted_size;
1927 } else if (wanted_size > samples_size) {
1928 uint8_t *samples_end, *q;
1929 int nb;
115329f1 1930
638c9d91
FB
1931 /* add samples */
 1932 nb = (wanted_size - samples_size);
1933 samples_end = (uint8_t *)samples + samples_size - n;
1934 q = samples_end + n;
1935 while (nb > 0) {
1936 memcpy(q, samples_end, n);
1937 q += n;
1938 nb -= n;
1939 }
1940 samples_size = wanted_size;
1941 }
1942 }
1943#if 0
115329f1
DB
1944 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
1945 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
1946 is->audio_clock, is->video_clock, is->audio_diff_threshold);
1947#endif
01310af2 1948 }
638c9d91
FB
1949 } else {
 1950 /* difference is too large: probably initial PTS errors, so
 1951 reset the A-V filter */
1952 is->audio_diff_avg_count = 0;
1953 is->audio_diff_cum = 0;
01310af2
FB
1954 }
1955 }
1956
01310af2
FB
1957 return samples_size;
1958}
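/*
 * Illustration, not part of ffplay.c: with 44100 Hz stereo S16 audio
 * (n = 2*channels = 4 bytes per frame), a measured drift diff = +0.05s and
 * an 8192 byte buffer (2048 frames), the correction above works out to:
 *
 *     wanted_size = 8192 + (int)(0.05 * 44100) * 4 = 8192 + 8820 = 17012
 *     min_size    = ((2048 *  90) / 100) * 4       = 7372
 *     max_size    = ((2048 * 110) / 100) * 4       = 9008
 *
 * so wanted_size is clamped to 9008 bytes, i.e. at most
 * SAMPLE_CORRECTION_PERCENT_MAX (10%) of the buffer is added or dropped per
 * call.  The drift itself is smoothed with
 * audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB) ~= 0.794, so the
 * last ~20 measurements dominate avg_diff before any correction is applied.
 */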
1959
 1960/* decode one audio frame and return its uncompressed size */
5a4476e2 1961static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 1962{
bea18375 1963 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 1964 AVPacket *pkt = &is->audio_pkt;
abdff646 1965 AVCodecContext *dec= is->audio_st->codec;
72ea344b 1966 int n, len1, data_size;
01310af2
FB
1967 double pts;
1968
1969 for(;;) {
72ea344b 1970 /* NOTE: the audio packet can contain several frames */
bea18375 1971 while (pkt_temp->size > 0) {
5a4476e2 1972 data_size = sizeof(is->audio_buf1);
bea18375 1973 len1 = avcodec_decode_audio3(dec,
5a4476e2 1974 (int16_t *)is->audio_buf1, &data_size,
bea18375 1975 pkt_temp);
72ea344b
FB
1976 if (len1 < 0) {
1977 /* if error, we skip the frame */
bea18375 1978 pkt_temp->size = 0;
01310af2 1979 break;
72ea344b 1980 }
115329f1 1981
bea18375
TB
1982 pkt_temp->data += len1;
1983 pkt_temp->size -= len1;
72ea344b
FB
1984 if (data_size <= 0)
1985 continue;
5a4476e2
PR
1986
1987 if (dec->sample_fmt != is->audio_src_fmt) {
1988 if (is->reformat_ctx)
1989 av_audio_convert_free(is->reformat_ctx);
1990 is->reformat_ctx= av_audio_convert_alloc(SAMPLE_FMT_S16, 1,
1991 dec->sample_fmt, 1, NULL, 0);
1992 if (!is->reformat_ctx) {
1993 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
1994 avcodec_get_sample_fmt_name(dec->sample_fmt),
1995 avcodec_get_sample_fmt_name(SAMPLE_FMT_S16));
1996 break;
1997 }
1998 is->audio_src_fmt= dec->sample_fmt;
1999 }
2000
2001 if (is->reformat_ctx) {
2002 const void *ibuf[6]= {is->audio_buf1};
2003 void *obuf[6]= {is->audio_buf2};
2004 int istride[6]= {av_get_bits_per_sample_format(dec->sample_fmt)/8};
2005 int ostride[6]= {2};
2006 int len= data_size/istride[0];
2007 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2008 printf("av_audio_convert() failed\n");
2009 break;
2010 }
2011 is->audio_buf= is->audio_buf2;
 2012 /* FIXME: existing code assumes that data_size equals framesize*channels*2;
2013 remove this legacy cruft */
2014 data_size= len*2;
2015 }else{
2016 is->audio_buf= is->audio_buf1;
2017 }
2018
72ea344b
FB
2019 /* if no pts, then compute it */
2020 pts = is->audio_clock;
2021 *pts_ptr = pts;
abdff646 2022 n = 2 * dec->channels;
115329f1 2023 is->audio_clock += (double)data_size /
abdff646 2024 (double)(n * dec->sample_rate);
638c9d91 2025#if defined(DEBUG_SYNC)
72ea344b
FB
2026 {
2027 static double last_clock;
2028 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2029 is->audio_clock - last_clock,
2030 is->audio_clock, pts);
2031 last_clock = is->audio_clock;
01310af2 2032 }
72ea344b
FB
2033#endif
2034 return data_size;
01310af2
FB
2035 }
2036
72ea344b
FB
2037 /* free the current packet */
2038 if (pkt->data)
01310af2 2039 av_free_packet(pkt);
115329f1 2040
72ea344b
FB
2041 if (is->paused || is->audioq.abort_request) {
2042 return -1;
2043 }
115329f1 2044
01310af2
FB
2045 /* read next packet */
2046 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2047 return -1;
39c6a118 2048 if(pkt->data == flush_pkt.data){
abdff646 2049 avcodec_flush_buffers(dec);
39c6a118
MN
2050 continue;
2051 }
2052
bea18375
TB
2053 pkt_temp->data = pkt->data;
2054 pkt_temp->size = pkt->size;
115329f1 2055
72ea344b
FB
 2056 /* update the audio clock with the pts, if available */
2057 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2058 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2059 }
01310af2
FB
2060 }
2061}
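/*
 * Illustration, not part of ffplay.c: after each decoded chunk the audio
 * clock advances by the duration of the returned samples.  For data_size =
 * 4096 bytes of 44100 Hz stereo S16 (n = 2*channels = 4):
 *
 *     audio_clock += 4096.0 / (4 * 44100) ~= 0.0232s   (1024 sample frames)
 */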
2062
638c9d91
FB
 2063/* get the current audio output buffer size, in bytes. With SDL, we
 2064 cannot have precise information about the hardware buffer fullness */
2065static int audio_write_get_buf_size(VideoState *is)
01310af2 2066{
b09b580b 2067 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2068}
2069
2070
2071/* prepare a new audio buffer */
358061f6 2072static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2073{
2074 VideoState *is = opaque;
2075 int audio_size, len1;
2076 double pts;
2077
2078 audio_callback_time = av_gettime();
115329f1 2079
01310af2
FB
2080 while (len > 0) {
2081 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2082 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2083 if (audio_size < 0) {
2084 /* if error, just output silence */
1a1078fa 2085 is->audio_buf = is->audio_buf1;
01310af2
FB
2086 is->audio_buf_size = 1024;
2087 memset(is->audio_buf, 0, is->audio_buf_size);
2088 } else {
2089 if (is->show_audio)
2090 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2091 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2092 pts);
2093 is->audio_buf_size = audio_size;
2094 }
2095 is->audio_buf_index = 0;
2096 }
2097 len1 = is->audio_buf_size - is->audio_buf_index;
2098 if (len1 > len)
2099 len1 = len;
2100 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2101 len -= len1;
2102 stream += len1;
2103 is->audio_buf_index += len1;
2104 }
2105}
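/*
 * Illustration, not part of ffplay.c: SDL invokes this callback once per
 * hardware period.  With wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE (1024)
 * and stereo S16 output, each call asks for len = 1024 * 2 * 2 = 4096 bytes,
 * about 23ms of audio at 44100 Hz.  audio_callback_time, taken at the top,
 * lets the display code estimate how much of that period has already been
 * played when it lines up the waveform with the audio clock.
 */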
2106
01310af2
FB
2107/* open a given stream. Return 0 if OK */
2108static int stream_component_open(VideoState *is, int stream_index)
2109{
2110 AVFormatContext *ic = is->ic;
fe74099a 2111 AVCodecContext *avctx;
01310af2
FB
2112 AVCodec *codec;
2113 SDL_AudioSpec wanted_spec, spec;
2114
2115 if (stream_index < 0 || stream_index >= ic->nb_streams)
2116 return -1;
fe74099a 2117 avctx = ic->streams[stream_index]->codec;
115329f1 2118
01310af2 2119 /* prepare audio output */
fe74099a
SS
2120 if (avctx->codec_type == CODEC_TYPE_AUDIO) {
2121 if (avctx->channels > 0) {
2122 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2123 } else {
fe74099a 2124 avctx->request_channels = 2;
638c9d91 2125 }
01310af2
FB
2126 }
2127
fe74099a
SS
2128 codec = avcodec_find_decoder(avctx->codec_id);
2129 avctx->debug_mv = debug_mv;
2130 avctx->debug = debug;
2131 avctx->workaround_bugs = workaround_bugs;
2132 avctx->lowres = lowres;
2133 if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2134 avctx->idct_algo= idct;
2135 if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2136 avctx->skip_frame= skip_frame;
2137 avctx->skip_idct= skip_idct;
2138 avctx->skip_loop_filter= skip_loop_filter;
2139 avctx->error_recognition= error_recognition;
2140 avctx->error_concealment= error_concealment;
2141 avcodec_thread_init(avctx, thread_count);
2142
2143 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0);
e43d7a18 2144
01310af2 2145 if (!codec ||
fe74099a 2146 avcodec_open(avctx, codec) < 0)
01310af2 2147 return -1;
51b73087
JR
2148
2149 /* prepare audio output */
fe74099a
SS
2150 if (avctx->codec_type == CODEC_TYPE_AUDIO) {
2151 wanted_spec.freq = avctx->sample_rate;
51b73087 2152 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2153 wanted_spec.channels = avctx->channels;
51b73087
JR
2154 wanted_spec.silence = 0;
2155 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2156 wanted_spec.callback = sdl_audio_callback;
2157 wanted_spec.userdata = is;
2158 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2159 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2160 return -1;
2161 }
2162 is->audio_hw_buf_size = spec.size;
5a4476e2 2163 is->audio_src_fmt= SAMPLE_FMT_S16;
51b73087
JR
2164 }
2165
3f3fe38d 2166 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2167 switch(avctx->codec_type) {
01310af2
FB
2168 case CODEC_TYPE_AUDIO:
2169 is->audio_stream = stream_index;
2170 is->audio_st = ic->streams[stream_index];
2171 is->audio_buf_size = 0;
2172 is->audio_buf_index = 0;
638c9d91
FB
2173
2174 /* init averaging filter */
2175 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2176 is->audio_diff_avg_count = 0;
 2177 /* since we do not have a precise enough measure of the audio fifo fullness,
 2178 we correct audio sync only if the error is larger than this threshold */
fe74099a 2179 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2180
01310af2
FB
2181 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2182 packet_queue_init(&is->audioq);
bb270c08 2183 SDL_PauseAudio(0);
01310af2
FB
2184 break;
2185 case CODEC_TYPE_VIDEO:
2186 is->video_stream = stream_index;
2187 is->video_st = ic->streams[stream_index];
2188
68aefbe8 2189// is->video_current_pts_time = av_gettime();
638c9d91 2190
01310af2
FB
2191 packet_queue_init(&is->videoq);
2192 is->video_tid = SDL_CreateThread(video_thread, is);
2193 break;
72ce053b
IC
2194 case CODEC_TYPE_SUBTITLE:
2195 is->subtitle_stream = stream_index;
2196 is->subtitle_st = ic->streams[stream_index];
2197 packet_queue_init(&is->subtitleq);
115329f1 2198
72ce053b
IC
2199 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2200 break;
01310af2
FB
2201 default:
2202 break;
2203 }
2204 return 0;
2205}
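/*
 * Illustration, not part of ffplay.c: the audio sync threshold set above
 * scales with the SDL buffer, e.g. for a 44100 Hz stream:
 *
 *     audio_diff_threshold = 2.0 * 1024 / 44100 ~= 0.046s
 *
 * so drift smaller than roughly two hardware periods is left uncorrected,
 * since the imprecise SDL fullness information cannot resolve it anyway.
 */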
2206
2207static void stream_component_close(VideoState *is, int stream_index)
2208{
2209 AVFormatContext *ic = is->ic;
fe74099a 2210 AVCodecContext *avctx;
115329f1 2211
72ce053b
IC
2212 if (stream_index < 0 || stream_index >= ic->nb_streams)
2213 return;
fe74099a 2214 avctx = ic->streams[stream_index]->codec;
01310af2 2215
fe74099a 2216 switch(avctx->codec_type) {
01310af2
FB
2217 case CODEC_TYPE_AUDIO:
2218 packet_queue_abort(&is->audioq);
2219
2220 SDL_CloseAudio();
2221
2222 packet_queue_end(&is->audioq);
5a4476e2
PR
2223 if (is->reformat_ctx)
2224 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2225 is->reformat_ctx = NULL;
01310af2
FB
2226 break;
2227 case CODEC_TYPE_VIDEO:
2228 packet_queue_abort(&is->videoq);
2229
 2230 /* note: we also signal this mutex to make sure we unblock the
 2231 video thread in all cases */
2232 SDL_LockMutex(is->pictq_mutex);
2233 SDL_CondSignal(is->pictq_cond);
2234 SDL_UnlockMutex(is->pictq_mutex);
2235
2236 SDL_WaitThread(is->video_tid, NULL);
2237
2238 packet_queue_end(&is->videoq);
2239 break;
72ce053b
IC
2240 case CODEC_TYPE_SUBTITLE:
2241 packet_queue_abort(&is->subtitleq);
115329f1 2242
72ce053b
IC
 2243 /* note: we also signal this mutex to make sure we unblock the
 2244 subtitle thread in all cases */
2245 SDL_LockMutex(is->subpq_mutex);
2246 is->subtitle_stream_changed = 1;
115329f1 2247
72ce053b
IC
2248 SDL_CondSignal(is->subpq_cond);
2249 SDL_UnlockMutex(is->subpq_mutex);
2250
2251 SDL_WaitThread(is->subtitle_tid, NULL);
2252
2253 packet_queue_end(&is->subtitleq);
2254 break;
01310af2
FB
2255 default:
2256 break;
2257 }
2258
3f3fe38d 2259 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2260 avcodec_close(avctx);
2261 switch(avctx->codec_type) {
01310af2
FB
2262 case CODEC_TYPE_AUDIO:
2263 is->audio_st = NULL;
2264 is->audio_stream = -1;
2265 break;
2266 case CODEC_TYPE_VIDEO:
2267 is->video_st = NULL;
2268 is->video_stream = -1;
2269 break;
72ce053b
IC
2270 case CODEC_TYPE_SUBTITLE:
2271 is->subtitle_st = NULL;
2272 is->subtitle_stream = -1;
2273 break;
01310af2
FB
2274 default:
2275 break;
2276 }
2277}
2278
416e3508
FB
2279/* since we have only one decoding thread, we can use a global
2280 variable instead of a thread local variable */
2281static VideoState *global_video_state;
2282
2283static int decode_interrupt_cb(void)
2284{
2285 return (global_video_state && global_video_state->abort_request);
2286}
01310af2
FB
2287
2288/* this thread gets the stream from the disk or the network */
2289static int decode_thread(void *arg)
2290{
2291 VideoState *is = arg;
2292 AVFormatContext *ic;
6625a3de
MN
2293 int err, i, ret;
2294 int st_index[CODEC_TYPE_NB];
256ab3ed 2295 int st_count[CODEC_TYPE_NB]={0};
9f7490a0 2296 int st_best_packet_count[CODEC_TYPE_NB];
01310af2 2297 AVPacket pkt1, *pkt = &pkt1;
61890b02 2298 AVFormatParameters params, *ap = &params;
75bb7b0a 2299 int eof=0;
01310af2 2300
6299a229
MN
2301 ic = avformat_alloc_context();
2302
6625a3de 2303 memset(st_index, -1, sizeof(st_index));
9f7490a0 2304 memset(st_best_packet_count, -1, sizeof(st_best_packet_count));
01310af2
FB
2305 is->video_stream = -1;
2306 is->audio_stream = -1;
72ce053b 2307 is->subtitle_stream = -1;
01310af2 2308
416e3508
FB
2309 global_video_state = is;
2310 url_set_interrupt_cb(decode_interrupt_cb);
2311
61890b02 2312 memset(ap, 0, sizeof(*ap));
115329f1 2313
6299a229 2314 ap->prealloced_context = 1;
e4b89522
LW
2315 ap->width = frame_width;
2316 ap->height= frame_height;
7e042912 2317 ap->time_base= (AVRational){1, 25};
e4b89522 2318 ap->pix_fmt = frame_pix_fmt;
7e042912 2319
6299a229
MN
2320 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
2321
61890b02 2322 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2323 if (err < 0) {
2324 print_error(is->filename, err);
2325 ret = -1;
2326 goto fail;
2327 }
01310af2 2328 is->ic = ic;
30bc6613
MN
2329
2330 if(genpts)
2331 ic->flags |= AVFMT_FLAG_GENPTS;
2332
24c07998
LA
2333 err = av_find_stream_info(ic);
2334 if (err < 0) {
2335 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2336 ret = -1;
2337 goto fail;
2338 }
899681cd
BA
2339 if(ic->pb)
2340 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2341
70a4764d
MN
2342 if(seek_by_bytes<0)
2343 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2344
72ea344b
FB
2345 /* if seeking requested, we execute it */
2346 if (start_time != AV_NOPTS_VALUE) {
2347 int64_t timestamp;
2348
2349 timestamp = start_time;
2350 /* add the stream start time */
2351 if (ic->start_time != AV_NOPTS_VALUE)
2352 timestamp += ic->start_time;
4ed29207 2353 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2354 if (ret < 0) {
115329f1 2355 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2356 is->filename, (double)timestamp / AV_TIME_BASE);
2357 }
2358 }
72ea344b 2359
01310af2 2360 for(i = 0; i < ic->nb_streams; i++) {
9f7490a0
MN
2361 AVStream *st= ic->streams[i];
2362 AVCodecContext *avctx = st->codec;
3f3fe38d 2363 ic->streams[i]->discard = AVDISCARD_ALL;
256ab3ed 2364 if(avctx->codec_type >= (unsigned)CODEC_TYPE_NB)
8ef94840 2365 continue;
256ab3ed
MN
2366 if(st_count[avctx->codec_type]++ != wanted_stream[avctx->codec_type] && wanted_stream[avctx->codec_type] >= 0)
2367 continue;
2368
9f7490a0
MN
2369 if(st_best_packet_count[avctx->codec_type] >= st->codec_info_nb_frames)
2370 continue;
2371 st_best_packet_count[avctx->codec_type]= st->codec_info_nb_frames;
2372
fe74099a 2373 switch(avctx->codec_type) {
01310af2 2374 case CODEC_TYPE_AUDIO:
256ab3ed 2375 if (!audio_disable)
6625a3de 2376 st_index[CODEC_TYPE_AUDIO] = i;
01310af2
FB
2377 break;
2378 case CODEC_TYPE_VIDEO:
16a59a7b 2379 case CODEC_TYPE_SUBTITLE:
256ab3ed
MN
2380 if (!video_disable)
2381 st_index[avctx->codec_type] = i;
16a59a7b 2382 break;
01310af2
FB
2383 default:
2384 break;
2385 }
2386 }
2387 if (show_status) {
2388 dump_format(ic, 0, is->filename, 0);
2389 }
2390
2391 /* open the streams */
6625a3de
MN
2392 if (st_index[CODEC_TYPE_AUDIO] >= 0) {
2393 stream_component_open(is, st_index[CODEC_TYPE_AUDIO]);
01310af2
FB
2394 }
2395
077a8d61 2396 ret=-1;
6625a3de
MN
2397 if (st_index[CODEC_TYPE_VIDEO] >= 0) {
2398 ret= stream_component_open(is, st_index[CODEC_TYPE_VIDEO]);
077a8d61 2399 }
d38c9e7a 2400 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2401 if(ret<0) {
01310af2 2402 if (!display_disable)
bf8ae197 2403 is->show_audio = 2;
01310af2
FB
2404 }
2405
6625a3de
MN
2406 if (st_index[CODEC_TYPE_SUBTITLE] >= 0) {
2407 stream_component_open(is, st_index[CODEC_TYPE_SUBTITLE]);
16a59a7b
BA
2408 }
2409
01310af2 2410 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2411 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2412 ret = -1;
01310af2
FB
2413 goto fail;
2414 }
2415
2416 for(;;) {
2417 if (is->abort_request)
2418 break;
416e3508
FB
2419 if (is->paused != is->last_paused) {
2420 is->last_paused = is->paused;
72ea344b 2421 if (is->paused)
f5668147 2422 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2423 else
2424 av_read_play(ic);
416e3508 2425 }
2f642393
AJ
2426#if CONFIG_RTSP_DEMUXER
2427 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2428 /* wait 10 ms to avoid trying to get another packet */
2429 /* XXX: horrible */
2430 SDL_Delay(10);
2431 continue;
2432 }
400738b1 2433#endif
72ea344b 2434 if (is->seek_req) {
8e606cc8 2435 int64_t seek_target= is->seek_pos;
4ed29207
MN
2436 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2437 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
 2438//FIXME the +-2 is because rounding is not done in the correct direction when the
 2439// seek_pos/seek_rel variables are generated
8e606cc8 2440
4ed29207 2441 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2442 if (ret < 0) {
2443 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2444 }else{
2445 if (is->audio_stream >= 0) {
2446 packet_queue_flush(&is->audioq);
39c6a118 2447 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2448 }
72ce053b
IC
2449 if (is->subtitle_stream >= 0) {
2450 packet_queue_flush(&is->subtitleq);
39c6a118 2451 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2452 }
e6c0297f
MN
2453 if (is->video_stream >= 0) {
2454 packet_queue_flush(&is->videoq);
39c6a118 2455 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2456 }
72ea344b
FB
2457 }
2458 is->seek_req = 0;
e45aeb38 2459 eof= 0;
72ea344b 2460 }
416e3508 2461
01310af2 2462 /* if the queue are full, no need to read more */
79ee4683
MN
2463 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2464 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2465 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2466 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2467 /* wait 10 ms */
2468 SDL_Delay(10);
2469 continue;
2470 }
75bb7b0a 2471 if(url_feof(ic->pb) || eof) {
9dc41767 2472 if(is->video_stream >= 0){
26534fe8
MN
2473 av_init_packet(pkt);
2474 pkt->data=NULL;
2475 pkt->size=0;
2476 pkt->stream_index= is->video_stream;
2477 packet_queue_put(&is->videoq, pkt);
9dc41767 2478 }
b4083171 2479 SDL_Delay(10);
2d1653b0
MN
2480 if(autoexit && is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2481 ret=AVERROR_EOF;
2482 goto fail;
2483 }
600a331c
MN
2484 continue;
2485 }
72ea344b 2486 ret = av_read_frame(ic, pkt);
01310af2 2487 if (ret < 0) {
75bb7b0a
MN
2488 if (ret == AVERROR_EOF)
2489 eof=1;
2490 if (url_ferror(ic->pb))
bb270c08 2491 break;
75bb7b0a
MN
2492 SDL_Delay(100); /* wait for user event */
2493 continue;
01310af2
FB
2494 }
2495 if (pkt->stream_index == is->audio_stream) {
2496 packet_queue_put(&is->audioq, pkt);
2497 } else if (pkt->stream_index == is->video_stream) {
2498 packet_queue_put(&is->videoq, pkt);
72ce053b
IC
2499 } else if (pkt->stream_index == is->subtitle_stream) {
2500 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2501 } else {
2502 av_free_packet(pkt);
2503 }
2504 }
2505 /* wait until the end */
2506 while (!is->abort_request) {
2507 SDL_Delay(100);
2508 }
2509
638c9d91 2510 ret = 0;
01310af2 2511 fail:
416e3508
FB
2512 /* disable interrupting */
2513 global_video_state = NULL;
2514
01310af2
FB
2515 /* close each stream */
2516 if (is->audio_stream >= 0)
2517 stream_component_close(is, is->audio_stream);
2518 if (is->video_stream >= 0)
2519 stream_component_close(is, is->video_stream);
72ce053b
IC
2520 if (is->subtitle_stream >= 0)
2521 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2522 if (is->ic) {
2523 av_close_input_file(is->ic);
2524 is->ic = NULL; /* safety */
2525 }
416e3508
FB
2526 url_set_interrupt_cb(NULL);
2527
638c9d91
FB
2528 if (ret != 0) {
2529 SDL_Event event;
115329f1 2530
638c9d91
FB
2531 event.type = FF_QUIT_EVENT;
2532 event.user.data1 = is;
2533 SDL_PushEvent(&event);
2534 }
01310af2
FB
2535 return 0;
2536}
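/*
 * Illustration, not part of ffplay.c: the "queue full" check in the read
 * loop above pauses reading when the three packet queues together hold more
 * than MAX_QUEUE_SIZE (15 MiB), or when every active queue is individually
 * well fed: more than MIN_AUDIOQ_SIZE (20*16*1024 = 320 KiB) of audio and
 * more than MIN_FRAMES (5) packets on the video and subtitle queues.
 * Reading resumes automatically as the decoder threads drain the queues.
 */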
2537
638c9d91 2538static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2539{
2540 VideoState *is;
2541
2542 is = av_mallocz(sizeof(VideoState));
2543 if (!is)
2544 return NULL;
f7d78f36 2545 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2546 is->iformat = iformat;
01310af2
FB
2547 is->ytop = 0;
2548 is->xleft = 0;
2549
2550 /* start video display */
2551 is->pictq_mutex = SDL_CreateMutex();
2552 is->pictq_cond = SDL_CreateCond();
115329f1 2553
72ce053b
IC
2554 is->subpq_mutex = SDL_CreateMutex();
2555 is->subpq_cond = SDL_CreateCond();
115329f1 2556
638c9d91 2557 is->av_sync_type = av_sync_type;
01310af2
FB
2558 is->parse_tid = SDL_CreateThread(decode_thread, is);
2559 if (!is->parse_tid) {
2560 av_free(is);
2561 return NULL;
2562 }
2563 return is;
2564}
2565
2566static void stream_close(VideoState *is)
2567{
2568 VideoPicture *vp;
2569 int i;
2570 /* XXX: use a special url_shutdown call to abort parse cleanly */
2571 is->abort_request = 1;
2572 SDL_WaitThread(is->parse_tid, NULL);
d38c9e7a 2573 SDL_WaitThread(is->refresh_tid, NULL);
01310af2
FB
2574
2575 /* free all pictures */
2576 for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2577 vp = &is->pictq[i];
917d2bb3
MN
2578#if CONFIG_AVFILTER
2579 if (vp->picref) {
2580 avfilter_unref_pic(vp->picref);
2581 vp->picref = NULL;
2582 }
2583#endif
01310af2
FB
2584 if (vp->bmp) {
2585 SDL_FreeYUVOverlay(vp->bmp);
2586 vp->bmp = NULL;
2587 }
2588 }
2589 SDL_DestroyMutex(is->pictq_mutex);
2590 SDL_DestroyCond(is->pictq_cond);
72ce053b
IC
2591 SDL_DestroyMutex(is->subpq_mutex);
2592 SDL_DestroyCond(is->subpq_cond);
917d2bb3 2593#if !CONFIG_AVFILTER
3ac56e28
MS
2594 if (is->img_convert_ctx)
2595 sws_freeContext(is->img_convert_ctx);
917d2bb3 2596#endif
7c5ab145 2597 av_free(is);
01310af2
FB
2598}
2599
7b49ce2e 2600static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2601{
2602 AVFormatContext *ic = is->ic;
2603 int start_index, stream_index;
2604 AVStream *st;
2605
2606 if (codec_type == CODEC_TYPE_VIDEO)
2607 start_index = is->video_stream;
72ce053b 2608 else if (codec_type == CODEC_TYPE_AUDIO)
638c9d91 2609 start_index = is->audio_stream;
72ce053b
IC
2610 else
2611 start_index = is->subtitle_stream;
2612 if (start_index < (codec_type == CODEC_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2613 return;
2614 stream_index = start_index;
2615 for(;;) {
2616 if (++stream_index >= is->ic->nb_streams)
72ce053b
IC
2617 {
2618 if (codec_type == CODEC_TYPE_SUBTITLE)
2619 {
2620 stream_index = -1;
2621 goto the_end;
2622 } else
2623 stream_index = 0;
2624 }
638c9d91
FB
2625 if (stream_index == start_index)
2626 return;
2627 st = ic->streams[stream_index];
01f4895c 2628 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2629 /* check that parameters are OK */
2630 switch(codec_type) {
2631 case CODEC_TYPE_AUDIO:
01f4895c
MN
2632 if (st->codec->sample_rate != 0 &&
2633 st->codec->channels != 0)
638c9d91
FB
2634 goto the_end;
2635 break;
2636 case CODEC_TYPE_VIDEO:
72ce053b 2637 case CODEC_TYPE_SUBTITLE:
638c9d91
FB
2638 goto the_end;
2639 default:
2640 break;
2641 }
2642 }
2643 }
2644 the_end:
2645 stream_component_close(is, start_index);
2646 stream_component_open(is, stream_index);
2647}
2648
2649
7b49ce2e 2650static void toggle_full_screen(void)
01310af2 2651{
01310af2 2652 is_full_screen = !is_full_screen;
29f3b38a
MR
2653 if (!fs_screen_width) {
2654 /* use default SDL method */
fb84155b 2655// SDL_WM_ToggleFullScreen(screen);
01310af2 2656 }
fb84155b 2657 video_open(cur_stream);
01310af2
FB
2658}
2659
7b49ce2e 2660static void toggle_pause(void)
01310af2
FB
2661{
2662 if (cur_stream)
2663 stream_pause(cur_stream);
bba04f1e
WH
2664 step = 0;
2665}
2666
7b49ce2e 2667static void step_to_next_frame(void)
bba04f1e
WH
2668{
2669 if (cur_stream) {
19cc524a 2670 /* if the stream is paused unpause it, then step */
bba04f1e 2671 if (cur_stream->paused)
19cc524a 2672 stream_pause(cur_stream);
bba04f1e
WH
2673 }
2674 step = 1;
01310af2
FB
2675}
2676
7b49ce2e 2677static void do_exit(void)
01310af2 2678{
7c5ab145 2679 int i;
01310af2
FB
2680 if (cur_stream) {
2681 stream_close(cur_stream);
2682 cur_stream = NULL;
2683 }
7c5ab145
MS
2684 for (i = 0; i < CODEC_TYPE_NB; i++)
2685 av_free(avcodec_opts[i]);
2686 av_free(avformat_opts);
2687 av_free(sws_opts);
917d2bb3
MN
2688#if CONFIG_AVFILTER
2689 avfilter_uninit();
2690#endif
01310af2
FB
2691 if (show_status)
2692 printf("\n");
2693 SDL_Quit();
2694 exit(0);
2695}
2696
7b49ce2e 2697static void toggle_audio_display(void)
01310af2
FB
2698{
2699 if (cur_stream) {
f5968788 2700 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2701 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2702 fill_rectangle(screen,
2703 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2704 bgcolor);
2705 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2706 }
2707}
2708
2709/* handle an event sent by the GUI */
7b49ce2e 2710static void event_loop(void)
01310af2
FB
2711{
2712 SDL_Event event;
a11d11aa 2713 double incr, pos, frac;
01310af2
FB
2714
2715 for(;;) {
d52ec002 2716 double x;
01310af2
FB
2717 SDL_WaitEvent(&event);
2718 switch(event.type) {
2719 case SDL_KEYDOWN:
2720 switch(event.key.keysym.sym) {
2721 case SDLK_ESCAPE:
2722 case SDLK_q:
2723 do_exit();
2724 break;
2725 case SDLK_f:
2726 toggle_full_screen();
2727 break;
2728 case SDLK_p:
2729 case SDLK_SPACE:
2730 toggle_pause();
2731 break;
bba04f1e
WH
2732 case SDLK_s: //S: Step to next frame
2733 step_to_next_frame();
2734 break;
01310af2 2735 case SDLK_a:
115329f1 2736 if (cur_stream)
638c9d91
FB
2737 stream_cycle_channel(cur_stream, CODEC_TYPE_AUDIO);
2738 break;
2739 case SDLK_v:
115329f1 2740 if (cur_stream)
638c9d91
FB
2741 stream_cycle_channel(cur_stream, CODEC_TYPE_VIDEO);
2742 break;
72ce053b 2743 case SDLK_t:
115329f1 2744 if (cur_stream)
72ce053b
IC
2745 stream_cycle_channel(cur_stream, CODEC_TYPE_SUBTITLE);
2746 break;
638c9d91 2747 case SDLK_w:
01310af2
FB
2748 toggle_audio_display();
2749 break;
72ea344b
FB
2750 case SDLK_LEFT:
2751 incr = -10.0;
2752 goto do_seek;
2753 case SDLK_RIGHT:
2754 incr = 10.0;
2755 goto do_seek;
2756 case SDLK_UP:
2757 incr = 60.0;
2758 goto do_seek;
2759 case SDLK_DOWN:
2760 incr = -60.0;
2761 do_seek:
2762 if (cur_stream) {
94b594c6 2763 if (seek_by_bytes) {
1a620dd7
MN
2764 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2765 pos= cur_stream->video_current_pos;
2766 }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2767 pos= cur_stream->audio_pkt.pos;
2768 }else
2769 pos = url_ftell(cur_stream->ic->pb);
94b594c6 2770 if (cur_stream->ic->bit_rate)
566cd2cb 2771 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2772 else
2773 incr *= 180000.0;
2774 pos += incr;
2ef46053 2775 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2776 } else {
2777 pos = get_master_clock(cur_stream);
2778 pos += incr;
2ef46053 2779 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2780 }
72ea344b
FB
2781 }
2782 break;
01310af2
FB
2783 default:
2784 break;
2785 }
2786 break;
a11d11aa 2787 case SDL_MOUSEBUTTONDOWN:
d52ec002
MN
2788 case SDL_MOUSEMOTION:
2789 if(event.type ==SDL_MOUSEBUTTONDOWN){
2790 x= event.button.x;
2791 }else{
2792 if(event.motion.state != SDL_PRESSED)
2793 break;
2794 x= event.motion.x;
2795 }
bb270c08 2796 if (cur_stream) {
2ef46053
MN
2797 if(seek_by_bytes || cur_stream->ic->duration<=0){
2798 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2799 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2800 }else{
6371c81a
MN
2801 int64_t ts;
2802 int ns, hh, mm, ss;
2803 int tns, thh, tmm, tss;
2804 tns = cur_stream->ic->duration/1000000LL;
2805 thh = tns/3600;
2806 tmm = (tns%3600)/60;
2807 tss = (tns%60);
d52ec002 2808 frac = x/cur_stream->width;
6371c81a
MN
2809 ns = frac*tns;
2810 hh = ns/3600;
2811 mm = (ns%3600)/60;
2812 ss = (ns%60);
2813 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2814 hh, mm, ss, thh, tmm, tss);
2815 ts = frac*cur_stream->ic->duration;
2816 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2817 ts += cur_stream->ic->start_time;
2818 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2819 }
bb270c08
DB
2820 }
2821 break;
01310af2
FB
2822 case SDL_VIDEORESIZE:
2823 if (cur_stream) {
115329f1 2824 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2825 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2826 screen_width = cur_stream->width = event.resize.w;
2827 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2828 }
2829 break;
2830 case SDL_QUIT:
638c9d91 2831 case FF_QUIT_EVENT:
01310af2
FB
2832 do_exit();
2833 break;
2834 case FF_ALLOC_EVENT:
fccb19e3 2835 video_open(event.user.data1);
01310af2
FB
2836 alloc_picture(event.user.data1);
2837 break;
2838 case FF_REFRESH_EVENT:
2839 video_refresh_timer(event.user.data1);
d38c9e7a 2840 cur_stream->refresh=0;
01310af2
FB
2841 break;
2842 default:
2843 break;
2844 }
2845 }
2846}
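/*
 * Illustration, not part of ffplay.c: a mouse click (or drag) seeks to the
 * horizontal fraction of the window computed above.  Clicking at x = 400 in
 * an 800 pixel wide window showing a 2 hour file gives frac = 0.5 and
 *
 *     ts = 0.5 * 7200 * 1000000 = 3600000000   ->  seek to 1:00:00
 *
 * shifted by ic->start_time when the file does not start at zero.
 */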
2847
e4b89522
LW
2848static void opt_frame_size(const char *arg)
2849{
b33ece16 2850 if (av_parse_video_frame_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2851 fprintf(stderr, "Incorrect frame size\n");
2852 exit(1);
2853 }
2854 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2855 fprintf(stderr, "Frame size must be a multiple of 2\n");
2856 exit(1);
2857 }
2858}
2859
a5b3b5f6 2860static int opt_width(const char *opt, const char *arg)
01310af2 2861{
a5b3b5f6
SS
2862 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2863 return 0;
01310af2
FB
2864}
2865
a5b3b5f6 2866static int opt_height(const char *opt, const char *arg)
01310af2 2867{
a5b3b5f6
SS
2868 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2869 return 0;
01310af2
FB
2870}
2871
2872static void opt_format(const char *arg)
2873{
2874 file_iformat = av_find_input_format(arg);
2875 if (!file_iformat) {
2876 fprintf(stderr, "Unknown input format: %s\n", arg);
2877 exit(1);
2878 }
2879}
61890b02 2880
e4b89522
LW
2881static void opt_frame_pix_fmt(const char *arg)
2882{
718c7b18 2883 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2884}
2885
b81d6235 2886static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2887{
2888 if (!strcmp(arg, "audio"))
2889 av_sync_type = AV_SYNC_AUDIO_MASTER;
2890 else if (!strcmp(arg, "video"))
2891 av_sync_type = AV_SYNC_VIDEO_MASTER;
2892 else if (!strcmp(arg, "ext"))
2893 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2894 else {
b81d6235 2895 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2896 exit(1);
2897 }
b81d6235 2898 return 0;
638c9d91
FB
2899}
2900
e11bc2c6 2901static int opt_seek(const char *opt, const char *arg)
72ea344b 2902{
e11bc2c6
SS
2903 start_time = parse_time_or_die(opt, arg, 1);
2904 return 0;
72ea344b
FB
2905}
2906
a5b3b5f6 2907static int opt_debug(const char *opt, const char *arg)
e26a8335 2908{
a309073b 2909 av_log_set_level(99);
a5b3b5f6
SS
2910 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
2911 return 0;
e26a8335 2912}
115329f1 2913
a5b3b5f6 2914static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 2915{
a5b3b5f6
SS
2916 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
2917 return 0;
0c9bbaec 2918}
c62c07d3 2919
a5b3b5f6 2920static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 2921{
a5b3b5f6 2922 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 2923#if !HAVE_THREADS
c62c07d3
MN
2924 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
2925#endif
a5b3b5f6 2926 return 0;
c62c07d3 2927}
115329f1 2928
358061f6 2929static const OptionDef options[] = {
992f8eae 2930#include "cmdutils_common_opts.h"
a5b3b5f6
SS
2931 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
2932 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 2933 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 2934 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
2935 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
2936 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
5b369983
MN
2937 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[CODEC_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
2938 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[CODEC_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
2939 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[CODEC_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 2940 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
674fe163 2941 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
2942 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
2943 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 2944 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 2945 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 2946 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 2947 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 2948 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 2949 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 2950 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 2951 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 2952 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
2953 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
2954 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
2955 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 2956 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 2957 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 2958 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 2959 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 2960 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 2961 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
d38c9e7a 2962 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
917d2bb3
MN
2963#if CONFIG_AVFILTER
2964 { "vfilters", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
2965#endif
2b3da32f 2966 { "rdftspeed", OPT_INT | HAS_ARG| OPT_AUDIO | OPT_EXPERT, {(void*)&rdftspeed}, "rdft speed", "msecs" },
e43d7a18 2967 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
2968 { NULL, },
2969};
2970
0c2a18cb 2971static void show_usage(void)
01310af2 2972{
27daa420
RP
2973 printf("Simple media player\n");
2974 printf("usage: ffplay [options] input_file\n");
01310af2 2975 printf("\n");
0c2a18cb
RP
2976}
2977
2978static void show_help(void)
2979{
2980 show_usage();
02d504a7
FB
2981 show_help_options(options, "Main options:\n",
2982 OPT_EXPERT, 0);
2983 show_help_options(options, "\nAdvanced options:\n",
2984 OPT_EXPERT, OPT_EXPERT);
01310af2
FB
2985 printf("\nWhile playing:\n"
2986 "q, ESC quit\n"
2987 "f toggle full screen\n"
2988 "p, SPC pause\n"
638c9d91
FB
2989 "a cycle audio channel\n"
2990 "v cycle video channel\n"
72ce053b 2991 "t cycle subtitle channel\n"
638c9d91 2992 "w show audio waves\n"
72ea344b
FB
2993 "left/right seek backward/forward 10 seconds\n"
2994 "down/up seek backward/forward 1 minute\n"
a11d11aa 2995 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 2996 );
01310af2
FB
2997}
2998
358061f6 2999static void opt_input_file(const char *filename)
01310af2 3000{
07a70b38
SS
3001 if (input_filename) {
3002 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
3003 filename, input_filename);
3004 exit(1);
3005 }
e8d83e1c 3006 if (!strcmp(filename, "-"))
9fcfc0b7 3007 filename = "pipe:";
01310af2
FB
3008 input_filename = filename;
3009}
3010
3011/* Called from the main */
3012int main(int argc, char **argv)
3013{
e43d7a18 3014 int flags, i;
115329f1 3015
01310af2 3016 /* register all codecs, demux and protocols */
c721d803
LA
3017 avcodec_register_all();
3018 avdevice_register_all();
917d2bb3
MN
3019#if CONFIG_AVFILTER
3020 avfilter_register_all();
3021#endif
01310af2
FB
3022 av_register_all();
3023
e43d7a18 3024 for(i=0; i<CODEC_TYPE_NB; i++){
636f1c4c 3025 avcodec_opts[i]= avcodec_alloc_context2(i);
e43d7a18 3026 }
8e2fd8e1 3027 avformat_opts = avformat_alloc_context();
917d2bb3 3028#if !CONFIG_AVFILTER
e43d7a18 3029 sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
917d2bb3 3030#endif
e43d7a18 3031
ea9c581f 3032 show_banner();
4cfac5bc 3033
f5da5c93 3034 parse_options(argc, argv, options, opt_input_file);
01310af2 3035
aab1b7e5 3036 if (!input_filename) {
7f11e745 3037 show_usage();
7a7da6b4 3038 fprintf(stderr, "An input file must be specified\n");
7f11e745 3039 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3040 exit(1);
3041 }
01310af2
FB
3042
3043 if (display_disable) {
3044 video_disable = 1;
3045 }
31319a8c 3046 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3047#if !defined(__MINGW32__) && !defined(__APPLE__)
3048 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3049#endif
01310af2 3050 if (SDL_Init (flags)) {
05ab0b76 3051 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3052 exit(1);
3053 }
3054
3055 if (!display_disable) {
b250f9c6 3056#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3057 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3058 fs_screen_width = vi->current_w;
3059 fs_screen_height = vi->current_h;
29f3b38a 3060#endif
01310af2
FB
3061 }
3062
3063 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3064 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3065 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3066
39c6a118
MN
3067 av_init_packet(&flush_pkt);
3068 flush_pkt.data= "FLUSH";
3069
638c9d91 3070 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3071
3072 event_loop();
3073
3074 /* never returns */
3075
3076 return 0;
3077}