Implement framedrop.
[libav.git] / ffplay.c
CommitLineData
01310af2 1/*
f05ef45c 2 * FFplay : Simple Media Player based on the FFmpeg libraries
01310af2
FB
3 * Copyright (c) 2003 Fabrice Bellard
4 *
b78e7197
DB
5 * This file is part of FFmpeg.
6 *
7 * FFmpeg is free software; you can redistribute it and/or
01310af2
FB
8 * modify it under the terms of the GNU Lesser General Public
9 * License as published by the Free Software Foundation; either
b78e7197 10 * version 2.1 of the License, or (at your option) any later version.
01310af2 11 *
b78e7197 12 * FFmpeg is distributed in the hope that it will be useful,
01310af2
FB
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15 * Lesser General Public License for more details.
16 *
17 * You should have received a copy of the GNU Lesser General Public
b78e7197 18 * License along with FFmpeg; if not, write to the Free Software
5509bffa 19 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
01310af2 20 */
364a9607 21
ba11257e 22#include "config.h"
8a3ceaf4 23#include <inttypes.h>
0f4e8165
RB
24#include <math.h>
25#include <limits.h>
245976da 26#include "libavutil/avstring.h"
718c7b18 27#include "libavutil/pixdesc.h"
245976da 28#include "libavformat/avformat.h"
245976da
DB
29#include "libavdevice/avdevice.h"
30#include "libswscale/swscale.h"
5a4476e2 31#include "libavcodec/audioconvert.h"
a7e6312b 32#include "libavcodec/colorspace.h"
e43d7a18 33#include "libavcodec/opt.h"
166621ab 34#include "libavcodec/avfft.h"
01310af2 35
917d2bb3
MN
36#if CONFIG_AVFILTER
37# include "libavfilter/avfilter.h"
38# include "libavfilter/avfiltergraph.h"
39# include "libavfilter/graphparser.h"
40#endif
41
01310af2
FB
42#include "cmdutils.h"
43
44#include <SDL.h>
45#include <SDL_thread.h>
46
2f30a81d 47#ifdef __MINGW32__
31319a8c
FB
48#undef main /* We don't want SDL to override our main() */
49#endif
50
d38c9e7a
MN
51#include <unistd.h>
52#include <assert.h>
53
64555bd9 54const char program_name[] = "FFplay";
ea9c581f 55const int program_birth_year = 2003;
4cfac5bc 56
638c9d91
FB
57//#define DEBUG_SYNC
58
79ee4683
MN
59#define MAX_QUEUE_SIZE (15 * 1024 * 1024)
60#define MIN_AUDIOQ_SIZE (20 * 16 * 1024)
61#define MIN_FRAMES 5
01310af2 62
638c9d91
FB
63/* SDL audio buffer size, in samples. Should be small to have precise
64 A/V sync as SDL does not have hardware buffer fullness info. */
65#define SDL_AUDIO_BUFFER_SIZE 1024
66
67/* no AV sync correction is done if below the AV sync threshold */
7e0140cb 68#define AV_SYNC_THRESHOLD 0.01
638c9d91
FB
69/* no AV correction is done if too big error */
70#define AV_NOSYNC_THRESHOLD 10.0
71
d38c9e7a
MN
72#define FRAME_SKIP_FACTOR 0.05
73
638c9d91
FB
74/* maximum audio speed change to get correct sync */
75#define SAMPLE_CORRECTION_PERCENT_MAX 10
76
77/* we use about AUDIO_DIFF_AVG_NB A-V differences to make the average */
78#define AUDIO_DIFF_AVG_NB 20
79
01310af2
FB
80/* NOTE: the size must be big enough to compensate the hardware audio buffersize size */
81#define SAMPLE_ARRAY_SIZE (2*65536)
82
917d2bb3 83#if !CONFIG_AVFILTER
03ae87a3 84static int sws_flags = SWS_BICUBIC;
917d2bb3 85#endif
03ae87a3 86
01310af2
FB
87typedef struct PacketQueue {
88 AVPacketList *first_pkt, *last_pkt;
89 int nb_packets;
90 int size;
91 int abort_request;
92 SDL_mutex *mutex;
93 SDL_cond *cond;
94} PacketQueue;
95
562f382c 96#define VIDEO_PICTURE_QUEUE_SIZE 2
72ce053b 97#define SUBPICTURE_QUEUE_SIZE 4
01310af2
FB
98
99typedef struct VideoPicture {
267e9dfa 100 double pts; ///<presentation time stamp for this picture
d38c9e7a 101 double target_clock; ///<av_gettime() time at which this should be displayed ideally
1a620dd7 102 int64_t pos; ///<byte position in file
01310af2
FB
103 SDL_Overlay *bmp;
104 int width, height; /* source height & width */
105 int allocated;
917d2bb3
MN
106 enum PixelFormat pix_fmt;
107
108#if CONFIG_AVFILTER
109 AVFilterPicRef *picref;
110#endif
01310af2
FB
111} VideoPicture;
112
72ce053b
IC
113typedef struct SubPicture {
114 double pts; /* presentation time stamp for this picture */
115 AVSubtitle sub;
116} SubPicture;
117
01310af2
FB
118enum {
119 AV_SYNC_AUDIO_MASTER, /* default choice */
120 AV_SYNC_VIDEO_MASTER,
638c9d91 121 AV_SYNC_EXTERNAL_CLOCK, /* synchronize to an external clock */
01310af2
FB
122};
123
124typedef struct VideoState {
125 SDL_Thread *parse_tid;
126 SDL_Thread *video_tid;
d38c9e7a 127 SDL_Thread *refresh_tid;
638c9d91 128 AVInputFormat *iformat;
01310af2
FB
129 int no_background;
130 int abort_request;
131 int paused;
416e3508 132 int last_paused;
72ea344b 133 int seek_req;
3ba1438d 134 int seek_flags;
72ea344b 135 int64_t seek_pos;
4ed29207 136 int64_t seek_rel;
f5668147 137 int read_pause_return;
01310af2
FB
138 AVFormatContext *ic;
139 int dtg_active_format;
140
141 int audio_stream;
115329f1 142
01310af2 143 int av_sync_type;
638c9d91
FB
144 double external_clock; /* external clock base */
145 int64_t external_clock_time;
115329f1 146
638c9d91
FB
147 double audio_clock;
148 double audio_diff_cum; /* used for AV difference average computation */
149 double audio_diff_avg_coef;
150 double audio_diff_threshold;
151 int audio_diff_avg_count;
01310af2
FB
152 AVStream *audio_st;
153 PacketQueue audioq;
154 int audio_hw_buf_size;
155 /* samples output by the codec. we reserve more space for avsync
156 compensation */
c6727809
MR
157 DECLARE_ALIGNED(16,uint8_t,audio_buf1)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
158 DECLARE_ALIGNED(16,uint8_t,audio_buf2)[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 2];
5a4476e2 159 uint8_t *audio_buf;
7fea94ce 160 unsigned int audio_buf_size; /* in bytes */
01310af2 161 int audio_buf_index; /* in bytes */
bea18375 162 AVPacket audio_pkt_temp;
01310af2 163 AVPacket audio_pkt;
5a4476e2
PR
164 enum SampleFormat audio_src_fmt;
165 AVAudioConvert *reformat_ctx;
115329f1 166
01310af2
FB
167 int show_audio; /* if true, display audio samples */
168 int16_t sample_array[SAMPLE_ARRAY_SIZE];
169 int sample_array_index;
5e0257e3 170 int last_i_start;
166621ab 171 RDFTContext *rdft;
12eeda34
MN
172 int rdft_bits;
173 int xpos;
115329f1 174
72ce053b
IC
175 SDL_Thread *subtitle_tid;
176 int subtitle_stream;
177 int subtitle_stream_changed;
178 AVStream *subtitle_st;
179 PacketQueue subtitleq;
180 SubPicture subpq[SUBPICTURE_QUEUE_SIZE];
181 int subpq_size, subpq_rindex, subpq_windex;
182 SDL_mutex *subpq_mutex;
183 SDL_cond *subpq_cond;
115329f1 184
638c9d91
FB
185 double frame_timer;
186 double frame_last_pts;
187 double frame_last_delay;
115329f1 188 double video_clock; ///<pts of last decoded frame / predicted pts of next decoded frame
01310af2
FB
189 int video_stream;
190 AVStream *video_st;
191 PacketQueue videoq;
267e9dfa 192 double video_current_pts; ///<current displayed pts (different from video_clock if frame fifos are used)
68aefbe8 193 double video_current_pts_drift; ///<video_current_pts - time (av_gettime) at which we updated video_current_pts - used to have running video pts
1a620dd7 194 int64_t video_current_pos; ///<current displayed file pos
01310af2
FB
195 VideoPicture pictq[VIDEO_PICTURE_QUEUE_SIZE];
196 int pictq_size, pictq_rindex, pictq_windex;
197 SDL_mutex *pictq_mutex;
198 SDL_cond *pictq_cond;
917d2bb3 199#if !CONFIG_AVFILTER
3ac56e28 200 struct SwsContext *img_convert_ctx;
917d2bb3 201#endif
115329f1 202
01310af2
FB
203 // QETimer *video_timer;
204 char filename[1024];
205 int width, height, xleft, ytop;
41db429d
MN
206
207 int64_t faulty_pts;
208 int64_t faulty_dts;
209 int64_t last_dts_for_fault_detection;
210 int64_t last_pts_for_fault_detection;
211
917d2bb3
MN
212#if CONFIG_AVFILTER
213 AVFilterContext *out_video_filter; ///<the last filter in the video chain
214#endif
d38c9e7a
MN
215
216 float skip_frames;
217 float skip_frames_index;
218 int refresh;
01310af2
FB
219} VideoState;
220
358061f6 221static void show_help(void);
638c9d91 222static int audio_write_get_buf_size(VideoState *is);
01310af2
FB
223
224/* options specified by the user */
225static AVInputFormat *file_iformat;
226static const char *input_filename;
227static int fs_screen_width;
228static int fs_screen_height;
fccb19e3
MN
229static int screen_width = 0;
230static int screen_height = 0;
e4b89522
LW
231static int frame_width = 0;
232static int frame_height = 0;
233static enum PixelFormat frame_pix_fmt = PIX_FMT_NONE;
01310af2
FB
234static int audio_disable;
235static int video_disable;
5b369983 236static int wanted_stream[CODEC_TYPE_NB]={
9f7490a0
MN
237 [CODEC_TYPE_AUDIO]=-1,
238 [CODEC_TYPE_VIDEO]=-1,
5b369983
MN
239 [CODEC_TYPE_SUBTITLE]=-1,
240};
70a4764d 241static int seek_by_bytes=-1;
01310af2 242static int display_disable;
1e1a0b18 243static int show_status = 1;
638c9d91 244static int av_sync_type = AV_SYNC_AUDIO_MASTER;
72ea344b 245static int64_t start_time = AV_NOPTS_VALUE;
e26a8335 246static int debug = 0;
0c9bbaec 247static int debug_mv = 0;
bba04f1e 248static int step = 0;
c62c07d3 249static int thread_count = 1;
6387c3e6 250static int workaround_bugs = 1;
6fc5b059 251static int fast = 0;
30bc6613 252static int genpts = 0;
178fcca8
MN
253static int lowres = 0;
254static int idct = FF_IDCT_AUTO;
8c3eba7c
MN
255static enum AVDiscard skip_frame= AVDISCARD_DEFAULT;
256static enum AVDiscard skip_idct= AVDISCARD_DEFAULT;
257static enum AVDiscard skip_loop_filter= AVDISCARD_DEFAULT;
047599a4 258static int error_recognition = FF_ER_CAREFUL;
1b51e051 259static int error_concealment = 3;
41db429d 260static int decoder_reorder_pts= -1;
2d1653b0 261static int autoexit;
d38c9e7a 262static int framedrop=1;
917d2bb3
MN
263#if CONFIG_AVFILTER
264static char *vfilters = NULL;
265#endif
01310af2
FB
266
267/* current context */
268static int is_full_screen;
269static VideoState *cur_stream;
5e0257e3 270static int64_t audio_callback_time;
01310af2 271
2c676c33 272static AVPacket flush_pkt;
39c6a118 273
01310af2
FB
274#define FF_ALLOC_EVENT (SDL_USEREVENT)
275#define FF_REFRESH_EVENT (SDL_USEREVENT + 1)
638c9d91 276#define FF_QUIT_EVENT (SDL_USEREVENT + 2)
01310af2 277
2c676c33 278static SDL_Surface *screen;
01310af2 279
515bd00e
MN
280static int packet_queue_put(PacketQueue *q, AVPacket *pkt);
281
01310af2
FB
282/* packet queue handling */
283static void packet_queue_init(PacketQueue *q)
284{
285 memset(q, 0, sizeof(PacketQueue));
286 q->mutex = SDL_CreateMutex();
287 q->cond = SDL_CreateCond();
515bd00e 288 packet_queue_put(q, &flush_pkt);
01310af2
FB
289}
290
72ea344b 291static void packet_queue_flush(PacketQueue *q)
01310af2
FB
292{
293 AVPacketList *pkt, *pkt1;
294
687fae2b 295 SDL_LockMutex(q->mutex);
01310af2
FB
296 for(pkt = q->first_pkt; pkt != NULL; pkt = pkt1) {
297 pkt1 = pkt->next;
298 av_free_packet(&pkt->pkt);
da6c4573 299 av_freep(&pkt);
01310af2 300 }
72ea344b
FB
301 q->last_pkt = NULL;
302 q->first_pkt = NULL;
303 q->nb_packets = 0;
304 q->size = 0;
687fae2b 305 SDL_UnlockMutex(q->mutex);
72ea344b
FB
306}
307
308static void packet_queue_end(PacketQueue *q)
309{
310 packet_queue_flush(q);
01310af2
FB
311 SDL_DestroyMutex(q->mutex);
312 SDL_DestroyCond(q->cond);
313}
314
315static int packet_queue_put(PacketQueue *q, AVPacket *pkt)
316{
317 AVPacketList *pkt1;
318
72ea344b 319 /* duplicate the packet */
39c6a118 320 if (pkt!=&flush_pkt && av_dup_packet(pkt) < 0)
72ea344b 321 return -1;
115329f1 322
01310af2
FB
323 pkt1 = av_malloc(sizeof(AVPacketList));
324 if (!pkt1)
325 return -1;
326 pkt1->pkt = *pkt;
327 pkt1->next = NULL;
328
72ea344b 329
01310af2
FB
330 SDL_LockMutex(q->mutex);
331
332 if (!q->last_pkt)
333
334 q->first_pkt = pkt1;
335 else
336 q->last_pkt->next = pkt1;
337 q->last_pkt = pkt1;
338 q->nb_packets++;
7b776589 339 q->size += pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
340 /* XXX: should duplicate packet data in DV case */
341 SDL_CondSignal(q->cond);
342
343 SDL_UnlockMutex(q->mutex);
344 return 0;
345}
346
347static void packet_queue_abort(PacketQueue *q)
348{
349 SDL_LockMutex(q->mutex);
350
351 q->abort_request = 1;
115329f1 352
01310af2
FB
353 SDL_CondSignal(q->cond);
354
355 SDL_UnlockMutex(q->mutex);
356}
357
358/* return < 0 if aborted, 0 if no packet and > 0 if packet. */
359static int packet_queue_get(PacketQueue *q, AVPacket *pkt, int block)
360{
361 AVPacketList *pkt1;
362 int ret;
363
364 SDL_LockMutex(q->mutex);
365
366 for(;;) {
367 if (q->abort_request) {
368 ret = -1;
369 break;
370 }
115329f1 371
01310af2
FB
372 pkt1 = q->first_pkt;
373 if (pkt1) {
374 q->first_pkt = pkt1->next;
375 if (!q->first_pkt)
376 q->last_pkt = NULL;
377 q->nb_packets--;
7b776589 378 q->size -= pkt1->pkt.size + sizeof(*pkt1);
01310af2
FB
379 *pkt = pkt1->pkt;
380 av_free(pkt1);
381 ret = 1;
382 break;
383 } else if (!block) {
384 ret = 0;
385 break;
386 } else {
387 SDL_CondWait(q->cond, q->mutex);
388 }
389 }
390 SDL_UnlockMutex(q->mutex);
391 return ret;
392}
393
115329f1 394static inline void fill_rectangle(SDL_Surface *screen,
01310af2
FB
395 int x, int y, int w, int h, int color)
396{
397 SDL_Rect rect;
398 rect.x = x;
399 rect.y = y;
400 rect.w = w;
401 rect.h = h;
402 SDL_FillRect(screen, &rect, color);
403}
404
405#if 0
406/* draw only the border of a rectangle */
407void fill_border(VideoState *s, int x, int y, int w, int h, int color)
408{
409 int w1, w2, h1, h2;
410
411 /* fill the background */
412 w1 = x;
413 if (w1 < 0)
414 w1 = 0;
415 w2 = s->width - (x + w);
416 if (w2 < 0)
417 w2 = 0;
418 h1 = y;
419 if (h1 < 0)
420 h1 = 0;
421 h2 = s->height - (y + h);
422 if (h2 < 0)
423 h2 = 0;
115329f1
DB
424 fill_rectangle(screen,
425 s->xleft, s->ytop,
426 w1, s->height,
01310af2 427 color);
115329f1
DB
428 fill_rectangle(screen,
429 s->xleft + s->width - w2, s->ytop,
430 w2, s->height,
01310af2 431 color);
115329f1
DB
432 fill_rectangle(screen,
433 s->xleft + w1, s->ytop,
434 s->width - w1 - w2, h1,
01310af2 435 color);
115329f1 436 fill_rectangle(screen,
01310af2
FB
437 s->xleft + w1, s->ytop + s->height - h2,
438 s->width - w1 - w2, h2,
439 color);
440}
441#endif
442
72ce053b
IC
443#define ALPHA_BLEND(a, oldp, newp, s)\
444((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s))
445
446#define RGBA_IN(r, g, b, a, s)\
447{\
448 unsigned int v = ((const uint32_t *)(s))[0];\
449 a = (v >> 24) & 0xff;\
450 r = (v >> 16) & 0xff;\
451 g = (v >> 8) & 0xff;\
452 b = v & 0xff;\
453}
454
455#define YUVA_IN(y, u, v, a, s, pal)\
456{\
57cf99f2 457 unsigned int val = ((const uint32_t *)(pal))[*(const uint8_t*)(s)];\
72ce053b
IC
458 a = (val >> 24) & 0xff;\
459 y = (val >> 16) & 0xff;\
460 u = (val >> 8) & 0xff;\
461 v = val & 0xff;\
462}
463
464#define YUVA_OUT(d, y, u, v, a)\
465{\
466 ((uint32_t *)(d))[0] = (a << 24) | (y << 16) | (u << 8) | v;\
467}
468
469
470#define BPP 1
471
0a8cd696 472static void blend_subrect(AVPicture *dst, const AVSubtitleRect *rect, int imgw, int imgh)
72ce053b
IC
473{
474 int wrap, wrap3, width2, skip2;
475 int y, u, v, a, u1, v1, a1, w, h;
476 uint8_t *lum, *cb, *cr;
477 const uint8_t *p;
478 const uint32_t *pal;
9cb5a11e
RD
479 int dstx, dsty, dstw, dsth;
480
7cf9c6ae
MN
481 dstw = av_clip(rect->w, 0, imgw);
482 dsth = av_clip(rect->h, 0, imgh);
483 dstx = av_clip(rect->x, 0, imgw - dstw);
484 dsty = av_clip(rect->y, 0, imgh - dsth);
9cb5a11e
RD
485 lum = dst->data[0] + dsty * dst->linesize[0];
486 cb = dst->data[1] + (dsty >> 1) * dst->linesize[1];
487 cr = dst->data[2] + (dsty >> 1) * dst->linesize[2];
488
f54b31b9 489 width2 = ((dstw + 1) >> 1) + (dstx & ~dstw & 1);
9cb5a11e 490 skip2 = dstx >> 1;
72ce053b 491 wrap = dst->linesize[0];
25b4c651
MN
492 wrap3 = rect->pict.linesize[0];
493 p = rect->pict.data[0];
494 pal = (const uint32_t *)rect->pict.data[1]; /* Now in YCrCb! */
115329f1 495
9cb5a11e
RD
496 if (dsty & 1) {
497 lum += dstx;
72ce053b
IC
498 cb += skip2;
499 cr += skip2;
115329f1 500
9cb5a11e 501 if (dstx & 1) {
72ce053b
IC
502 YUVA_IN(y, u, v, a, p, pal);
503 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
504 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
505 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
506 cb++;
507 cr++;
508 lum++;
509 p += BPP;
510 }
9cb5a11e 511 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
512 YUVA_IN(y, u, v, a, p, pal);
513 u1 = u;
514 v1 = v;
515 a1 = a;
516 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
517
518 YUVA_IN(y, u, v, a, p + BPP, pal);
519 u1 += u;
520 v1 += v;
521 a1 += a;
522 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
523 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
524 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
525 cb++;
526 cr++;
527 p += 2 * BPP;
528 lum += 2;
529 }
530 if (w) {
531 YUVA_IN(y, u, v, a, p, pal);
532 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
533 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
534 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
676ef505
BA
535 p++;
536 lum++;
72ce053b 537 }
4606a059
BA
538 p += wrap3 - dstw * BPP;
539 lum += wrap - dstw - dstx;
72ce053b
IC
540 cb += dst->linesize[1] - width2 - skip2;
541 cr += dst->linesize[2] - width2 - skip2;
542 }
9cb5a11e
RD
543 for(h = dsth - (dsty & 1); h >= 2; h -= 2) {
544 lum += dstx;
72ce053b
IC
545 cb += skip2;
546 cr += skip2;
115329f1 547
9cb5a11e 548 if (dstx & 1) {
72ce053b
IC
549 YUVA_IN(y, u, v, a, p, pal);
550 u1 = u;
551 v1 = v;
552 a1 = a;
553 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
554 p += wrap3;
555 lum += wrap;
556 YUVA_IN(y, u, v, a, p, pal);
557 u1 += u;
558 v1 += v;
559 a1 += a;
560 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
561 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
562 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
563 cb++;
564 cr++;
565 p += -wrap3 + BPP;
566 lum += -wrap + 1;
567 }
9cb5a11e 568 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
569 YUVA_IN(y, u, v, a, p, pal);
570 u1 = u;
571 v1 = v;
572 a1 = a;
573 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
574
f8ca63e8 575 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
576 u1 += u;
577 v1 += v;
578 a1 += a;
579 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
580 p += wrap3;
581 lum += wrap;
582
583 YUVA_IN(y, u, v, a, p, pal);
584 u1 += u;
585 v1 += v;
586 a1 += a;
587 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
588
f8ca63e8 589 YUVA_IN(y, u, v, a, p + BPP, pal);
72ce053b
IC
590 u1 += u;
591 v1 += v;
592 a1 += a;
593 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
594
595 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 2);
596 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 2);
597
598 cb++;
599 cr++;
600 p += -wrap3 + 2 * BPP;
601 lum += -wrap + 2;
602 }
603 if (w) {
604 YUVA_IN(y, u, v, a, p, pal);
605 u1 = u;
606 v1 = v;
607 a1 = a;
608 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
609 p += wrap3;
610 lum += wrap;
611 YUVA_IN(y, u, v, a, p, pal);
612 u1 += u;
613 v1 += v;
614 a1 += a;
615 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
616 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u1, 1);
617 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v1, 1);
618 cb++;
619 cr++;
620 p += -wrap3 + BPP;
621 lum += -wrap + 1;
622 }
9cb5a11e
RD
623 p += wrap3 + (wrap3 - dstw * BPP);
624 lum += wrap + (wrap - dstw - dstx);
72ce053b
IC
625 cb += dst->linesize[1] - width2 - skip2;
626 cr += dst->linesize[2] - width2 - skip2;
627 }
628 /* handle odd height */
629 if (h) {
9cb5a11e 630 lum += dstx;
72ce053b
IC
631 cb += skip2;
632 cr += skip2;
115329f1 633
9cb5a11e 634 if (dstx & 1) {
72ce053b
IC
635 YUVA_IN(y, u, v, a, p, pal);
636 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
637 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
638 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
639 cb++;
640 cr++;
641 lum++;
642 p += BPP;
643 }
9cb5a11e 644 for(w = dstw - (dstx & 1); w >= 2; w -= 2) {
72ce053b
IC
645 YUVA_IN(y, u, v, a, p, pal);
646 u1 = u;
647 v1 = v;
648 a1 = a;
649 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
650
651 YUVA_IN(y, u, v, a, p + BPP, pal);
652 u1 += u;
653 v1 += v;
654 a1 += a;
655 lum[1] = ALPHA_BLEND(a, lum[1], y, 0);
656 cb[0] = ALPHA_BLEND(a1 >> 2, cb[0], u, 1);
657 cr[0] = ALPHA_BLEND(a1 >> 2, cr[0], v, 1);
658 cb++;
659 cr++;
660 p += 2 * BPP;
661 lum += 2;
662 }
663 if (w) {
664 YUVA_IN(y, u, v, a, p, pal);
665 lum[0] = ALPHA_BLEND(a, lum[0], y, 0);
666 cb[0] = ALPHA_BLEND(a >> 2, cb[0], u, 0);
667 cr[0] = ALPHA_BLEND(a >> 2, cr[0], v, 0);
668 }
669 }
670}
671
672static void free_subpicture(SubPicture *sp)
673{
674 int i;
115329f1 675
72ce053b
IC
676 for (i = 0; i < sp->sub.num_rects; i++)
677 {
25b4c651
MN
678 av_freep(&sp->sub.rects[i]->pict.data[0]);
679 av_freep(&sp->sub.rects[i]->pict.data[1]);
db4fac64 680 av_freep(&sp->sub.rects[i]);
72ce053b 681 }
115329f1 682
72ce053b 683 av_free(sp->sub.rects);
115329f1 684
72ce053b
IC
685 memset(&sp->sub, 0, sizeof(AVSubtitle));
686}
687
01310af2
FB
688static void video_image_display(VideoState *is)
689{
690 VideoPicture *vp;
72ce053b
IC
691 SubPicture *sp;
692 AVPicture pict;
01310af2
FB
693 float aspect_ratio;
694 int width, height, x, y;
695 SDL_Rect rect;
72ce053b 696 int i;
01310af2
FB
697
698 vp = &is->pictq[is->pictq_rindex];
699 if (vp->bmp) {
917d2bb3
MN
700#if CONFIG_AVFILTER
701 if (vp->picref->pixel_aspect.num == 0)
702 aspect_ratio = 0;
703 else
704 aspect_ratio = av_q2d(vp->picref->pixel_aspect);
705#else
706
01310af2 707 /* XXX: use variable in the frame */
c30a4489
AJ
708 if (is->video_st->sample_aspect_ratio.num)
709 aspect_ratio = av_q2d(is->video_st->sample_aspect_ratio);
710 else if (is->video_st->codec->sample_aspect_ratio.num)
711 aspect_ratio = av_q2d(is->video_st->codec->sample_aspect_ratio);
72ea344b 712 else
c30a4489 713 aspect_ratio = 0;
917d2bb3 714#endif
01310af2 715 if (aspect_ratio <= 0.0)
c30a4489 716 aspect_ratio = 1.0;
917d2bb3 717 aspect_ratio *= (float)vp->width / (float)vp->height;
01310af2
FB
718 /* if an active format is indicated, then it overrides the
719 mpeg format */
720#if 0
01f4895c
MN
721 if (is->video_st->codec->dtg_active_format != is->dtg_active_format) {
722 is->dtg_active_format = is->video_st->codec->dtg_active_format;
01310af2
FB
723 printf("dtg_active_format=%d\n", is->dtg_active_format);
724 }
725#endif
726#if 0
01f4895c 727 switch(is->video_st->codec->dtg_active_format) {
01310af2
FB
728 case FF_DTG_AFD_SAME:
729 default:
730 /* nothing to do */
731 break;
732 case FF_DTG_AFD_4_3:
733 aspect_ratio = 4.0 / 3.0;
734 break;
735 case FF_DTG_AFD_16_9:
736 aspect_ratio = 16.0 / 9.0;
737 break;
738 case FF_DTG_AFD_14_9:
739 aspect_ratio = 14.0 / 9.0;
740 break;
741 case FF_DTG_AFD_4_3_SP_14_9:
742 aspect_ratio = 14.0 / 9.0;
743 break;
744 case FF_DTG_AFD_16_9_SP_14_9:
745 aspect_ratio = 14.0 / 9.0;
746 break;
747 case FF_DTG_AFD_SP_4_3:
748 aspect_ratio = 4.0 / 3.0;
749 break;
750 }
751#endif
752
72ce053b
IC
753 if (is->subtitle_st)
754 {
755 if (is->subpq_size > 0)
756 {
757 sp = &is->subpq[is->subpq_rindex];
758
759 if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000))
760 {
761 SDL_LockYUVOverlay (vp->bmp);
762
763 pict.data[0] = vp->bmp->pixels[0];
764 pict.data[1] = vp->bmp->pixels[2];
765 pict.data[2] = vp->bmp->pixels[1];
766
767 pict.linesize[0] = vp->bmp->pitches[0];
768 pict.linesize[1] = vp->bmp->pitches[2];
769 pict.linesize[2] = vp->bmp->pitches[1];
770
771 for (i = 0; i < sp->sub.num_rects; i++)
db4fac64 772 blend_subrect(&pict, sp->sub.rects[i],
0a8cd696 773 vp->bmp->w, vp->bmp->h);
72ce053b
IC
774
775 SDL_UnlockYUVOverlay (vp->bmp);
776 }
777 }
778 }
779
780
01310af2
FB
781 /* XXX: we suppose the screen has a 1.0 pixel ratio */
782 height = is->height;
bb6c34e5 783 width = ((int)rint(height * aspect_ratio)) & ~1;
01310af2
FB
784 if (width > is->width) {
785 width = is->width;
bb6c34e5 786 height = ((int)rint(width / aspect_ratio)) & ~1;
01310af2
FB
787 }
788 x = (is->width - width) / 2;
789 y = (is->height - height) / 2;
790 if (!is->no_background) {
791 /* fill the background */
792 // fill_border(is, x, y, width, height, QERGB(0x00, 0x00, 0x00));
793 } else {
794 is->no_background = 0;
795 }
796 rect.x = is->xleft + x;
2f6547fb 797 rect.y = is->ytop + y;
01310af2
FB
798 rect.w = width;
799 rect.h = height;
800 SDL_DisplayYUVOverlay(vp->bmp, &rect);
801 } else {
802#if 0
115329f1
DB
803 fill_rectangle(screen,
804 is->xleft, is->ytop, is->width, is->height,
01310af2
FB
805 QERGB(0x00, 0x00, 0x00));
806#endif
807 }
808}
809
810static inline int compute_mod(int a, int b)
811{
812 a = a % b;
115329f1 813 if (a >= 0)
01310af2
FB
814 return a;
815 else
816 return a + b;
817}
818
819static void video_audio_display(VideoState *s)
820{
821 int i, i_start, x, y1, y, ys, delay, n, nb_display_channels;
822 int ch, channels, h, h2, bgcolor, fgcolor;
823 int16_t time_diff;
4c7c7645
MN
824 int rdft_bits, nb_freq;
825
826 for(rdft_bits=1; (1<<rdft_bits)<2*s->height; rdft_bits++)
827 ;
828 nb_freq= 1<<(rdft_bits-1);
115329f1 829
01310af2 830 /* compute display index : center on currently output samples */
01f4895c 831 channels = s->audio_st->codec->channels;
01310af2 832 nb_display_channels = channels;
5e0257e3 833 if (!s->paused) {
4c7c7645 834 int data_used= s->show_audio==1 ? s->width : (2*nb_freq);
5e0257e3
FB
835 n = 2 * channels;
836 delay = audio_write_get_buf_size(s);
837 delay /= n;
115329f1 838
5e0257e3
FB
839 /* to be more precise, we take into account the time spent since
840 the last buffer computation */
841 if (audio_callback_time) {
842 time_diff = av_gettime() - audio_callback_time;
122dcdcb 843 delay -= (time_diff * s->audio_st->codec->sample_rate) / 1000000;
5e0257e3 844 }
115329f1 845
122dcdcb 846 delay += 2*data_used;
4c7c7645
MN
847 if (delay < data_used)
848 delay = data_used;
ac50bcc8
MN
849
850 i_start= x = compute_mod(s->sample_array_index - delay * channels, SAMPLE_ARRAY_SIZE);
12eeda34 851 if(s->show_audio==1){
6c7165c7
JM
852 h= INT_MIN;
853 for(i=0; i<1000; i+=channels){
854 int idx= (SAMPLE_ARRAY_SIZE + x - i) % SAMPLE_ARRAY_SIZE;
855 int a= s->sample_array[idx];
856 int b= s->sample_array[(idx + 4*channels)%SAMPLE_ARRAY_SIZE];
857 int c= s->sample_array[(idx + 5*channels)%SAMPLE_ARRAY_SIZE];
858 int d= s->sample_array[(idx + 9*channels)%SAMPLE_ARRAY_SIZE];
859 int score= a-d;
860 if(h<score && (b^c)<0){
861 h= score;
862 i_start= idx;
863 }
ac50bcc8
MN
864 }
865 }
866
5e0257e3
FB
867 s->last_i_start = i_start;
868 } else {
869 i_start = s->last_i_start;
01310af2
FB
870 }
871
01310af2 872 bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 873 if(s->show_audio==1){
6c7165c7
JM
874 fill_rectangle(screen,
875 s->xleft, s->ytop, s->width, s->height,
876 bgcolor);
877
878 fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff);
879
880 /* total height for one channel */
881 h = s->height / nb_display_channels;
882 /* graph height / 2 */
883 h2 = (h * 9) / 20;
884 for(ch = 0;ch < nb_display_channels; ch++) {
885 i = i_start + ch;
886 y1 = s->ytop + ch * h + (h / 2); /* position of center line */
887 for(x = 0; x < s->width; x++) {
888 y = (s->sample_array[i] * h2) >> 15;
889 if (y < 0) {
890 y = -y;
891 ys = y1 - y;
892 } else {
893 ys = y1;
894 }
895 fill_rectangle(screen,
896 s->xleft + x, ys, 1, y,
897 fgcolor);
898 i += channels;
899 if (i >= SAMPLE_ARRAY_SIZE)
900 i -= SAMPLE_ARRAY_SIZE;
01310af2 901 }
01310af2 902 }
01310af2 903
6c7165c7 904 fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff);
01310af2 905
6c7165c7
JM
906 for(ch = 1;ch < nb_display_channels; ch++) {
907 y = s->ytop + ch * h;
908 fill_rectangle(screen,
909 s->xleft, y, s->width, 1,
910 fgcolor);
911 }
912 SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height);
12eeda34 913 }else{
12eeda34 914 nb_display_channels= FFMIN(nb_display_channels, 2);
12eeda34 915 if(rdft_bits != s->rdft_bits){
166621ab
MR
916 av_rdft_end(s->rdft);
917 s->rdft = av_rdft_init(rdft_bits, DFT_R2C);
12eeda34
MN
918 s->rdft_bits= rdft_bits;
919 }
12eeda34
MN
920 {
921 FFTSample data[2][2*nb_freq];
922 for(ch = 0;ch < nb_display_channels; ch++) {
923 i = i_start + ch;
924 for(x = 0; x < 2*nb_freq; x++) {
925 double w= (x-nb_freq)*(1.0/nb_freq);
926 data[ch][x]= s->sample_array[i]*(1.0-w*w);
927 i += channels;
928 if (i >= SAMPLE_ARRAY_SIZE)
929 i -= SAMPLE_ARRAY_SIZE;
930 }
166621ab 931 av_rdft_calc(s->rdft, data[ch]);
12eeda34
MN
932 }
933 //least efficient way to do this, we should of course directly access it but its more than fast enough
092421cf 934 for(y=0; y<s->height; y++){
12eeda34
MN
935 double w= 1/sqrt(nb_freq);
936 int a= sqrt(w*sqrt(data[0][2*y+0]*data[0][2*y+0] + data[0][2*y+1]*data[0][2*y+1]));
937 int b= sqrt(w*sqrt(data[1][2*y+0]*data[1][2*y+0] + data[1][2*y+1]*data[1][2*y+1]));
938 a= FFMIN(a,255);
939 b= FFMIN(b,255);
940 fgcolor = SDL_MapRGB(screen->format, a, b, (a+b)/2);
941
942 fill_rectangle(screen,
943 s->xpos, s->height-y, 1, 1,
944 fgcolor);
945 }
946 }
947 SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height);
948 s->xpos++;
949 if(s->xpos >= s->width)
950 s->xpos= s->xleft;
951 }
01310af2
FB
952}
953
990c8438
MN
954static int video_open(VideoState *is){
955 int flags = SDL_HWSURFACE|SDL_ASYNCBLIT|SDL_HWACCEL;
956 int w,h;
957
fb84155b
MN
958 if(is_full_screen) flags |= SDL_FULLSCREEN;
959 else flags |= SDL_RESIZABLE;
960
990c8438
MN
961 if (is_full_screen && fs_screen_width) {
962 w = fs_screen_width;
963 h = fs_screen_height;
fb84155b
MN
964 } else if(!is_full_screen && screen_width){
965 w = screen_width;
966 h = screen_height;
917d2bb3
MN
967#if CONFIG_AVFILTER
968 }else if (is->out_video_filter && is->out_video_filter->inputs[0]){
969 w = is->out_video_filter->inputs[0]->w;
970 h = is->out_video_filter->inputs[0]->h;
971#else
fb84155b
MN
972 }else if (is->video_st && is->video_st->codec->width){
973 w = is->video_st->codec->width;
974 h = is->video_st->codec->height;
917d2bb3 975#endif
990c8438 976 } else {
fb84155b
MN
977 w = 640;
978 h = 480;
990c8438 979 }
d3d7b12e
MN
980 if(screen && is->width == screen->w && screen->w == w
981 && is->height== screen->h && screen->h == h)
982 return 0;
983
c97f5402 984#ifndef __APPLE__
990c8438
MN
985 screen = SDL_SetVideoMode(w, h, 0, flags);
986#else
987 /* setting bits_per_pixel = 0 or 32 causes blank video on OS X */
988 screen = SDL_SetVideoMode(w, h, 24, flags);
989#endif
990 if (!screen) {
991 fprintf(stderr, "SDL: could not set video mode - exiting\n");
992 return -1;
993 }
994 SDL_WM_SetCaption("FFplay", "FFplay");
995
996 is->width = screen->w;
997 is->height = screen->h;
998
999 return 0;
1000}
8c982c5d 1001
01310af2
FB
1002/* display the current picture, if any */
1003static void video_display(VideoState *is)
1004{
8c982c5d
MN
1005 if(!screen)
1006 video_open(cur_stream);
115329f1 1007 if (is->audio_st && is->show_audio)
01310af2
FB
1008 video_audio_display(is);
1009 else if (is->video_st)
1010 video_image_display(is);
1011}
1012
d38c9e7a 1013static int refresh_thread(void *opaque)
01310af2 1014{
d38c9e7a
MN
1015 VideoState *is= opaque;
1016 while(!is->abort_request){
01310af2
FB
1017 SDL_Event event;
1018 event.type = FF_REFRESH_EVENT;
1019 event.user.data1 = opaque;
d38c9e7a
MN
1020 if(!is->refresh){
1021 is->refresh=1;
01310af2 1022 SDL_PushEvent(&event);
d38c9e7a
MN
1023 }
1024 usleep(5000); //FIXME ideally we should wait the correct time but SDLs event passing is so slow it would be silly
1025 }
1026 return 0;
01310af2
FB
1027}
1028
638c9d91
FB
1029/* get the current audio clock value */
1030static double get_audio_clock(VideoState *is)
1031{
1032 double pts;
1033 int hw_buf_size, bytes_per_sec;
1034 pts = is->audio_clock;
1035 hw_buf_size = audio_write_get_buf_size(is);
1036 bytes_per_sec = 0;
1037 if (is->audio_st) {
115329f1 1038 bytes_per_sec = is->audio_st->codec->sample_rate *
01f4895c 1039 2 * is->audio_st->codec->channels;
638c9d91
FB
1040 }
1041 if (bytes_per_sec)
1042 pts -= (double)hw_buf_size / bytes_per_sec;
1043 return pts;
1044}
1045
1046/* get the current video clock value */
1047static double get_video_clock(VideoState *is)
1048{
04108619 1049 if (is->paused) {
41a4cd0c 1050 return is->video_current_pts;
72ea344b 1051 } else {
68aefbe8 1052 return is->video_current_pts_drift + av_gettime() / 1000000.0;
72ea344b 1053 }
638c9d91
FB
1054}
1055
1056/* get the current external clock value */
1057static double get_external_clock(VideoState *is)
1058{
1059 int64_t ti;
1060 ti = av_gettime();
1061 return is->external_clock + ((ti - is->external_clock_time) * 1e-6);
1062}
1063
1064/* get the current master clock value */
1065static double get_master_clock(VideoState *is)
1066{
1067 double val;
1068
72ea344b
FB
1069 if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
1070 if (is->video_st)
1071 val = get_video_clock(is);
1072 else
1073 val = get_audio_clock(is);
1074 } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
1075 if (is->audio_st)
1076 val = get_audio_clock(is);
1077 else
1078 val = get_video_clock(is);
1079 } else {
638c9d91 1080 val = get_external_clock(is);
72ea344b 1081 }
638c9d91
FB
1082 return val;
1083}
1084
72ea344b 1085/* seek in the stream */
2ef46053 1086static void stream_seek(VideoState *is, int64_t pos, int64_t rel, int seek_by_bytes)
72ea344b 1087{
687fae2b
IW
1088 if (!is->seek_req) {
1089 is->seek_pos = pos;
4ed29207 1090 is->seek_rel = rel;
3890dd3a 1091 is->seek_flags &= ~AVSEEK_FLAG_BYTE;
94b594c6
SH
1092 if (seek_by_bytes)
1093 is->seek_flags |= AVSEEK_FLAG_BYTE;
687fae2b
IW
1094 is->seek_req = 1;
1095 }
72ea344b
FB
1096}
1097
1098/* pause or resume the video */
1099static void stream_pause(VideoState *is)
1100{
68aefbe8
MN
1101 if (is->paused) {
1102 is->frame_timer += av_gettime() / 1000000.0 + is->video_current_pts_drift - is->video_current_pts;
f5668147 1103 if(is->read_pause_return != AVERROR(ENOSYS)){
68aefbe8 1104 is->video_current_pts = is->video_current_pts_drift + av_gettime() / 1000000.0;
f5668147 1105 }
68aefbe8 1106 is->video_current_pts_drift = is->video_current_pts - av_gettime() / 1000000.0;
72ea344b 1107 }
68aefbe8 1108 is->paused = !is->paused;
72ea344b
FB
1109}
1110
d38c9e7a 1111static double compute_target_time(double frame_current_pts, VideoState *is)
49410784 1112{
d38c9e7a 1113 double delay, sync_threshold, diff;
49410784
TB
1114
1115 /* compute nominal delay */
1116 delay = frame_current_pts - is->frame_last_pts;
1117 if (delay <= 0 || delay >= 10.0) {
1118 /* if incorrect delay, use previous one */
1119 delay = is->frame_last_delay;
443658fd 1120 } else {
712de377 1121 is->frame_last_delay = delay;
443658fd 1122 }
49410784
TB
1123 is->frame_last_pts = frame_current_pts;
1124
1125 /* update delay to follow master synchronisation source */
1126 if (((is->av_sync_type == AV_SYNC_AUDIO_MASTER && is->audio_st) ||
1127 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1128 /* if video is slave, we try to correct big delays by
1129 duplicating or deleting a frame */
f04c6e35 1130 diff = get_video_clock(is) - get_master_clock(is);
49410784
TB
1131
1132 /* skip or repeat frame. We take into account the
1133 delay to compute the threshold. I still don't know
1134 if it is the best guess */
1135 sync_threshold = FFMAX(AV_SYNC_THRESHOLD, delay);
1136 if (fabs(diff) < AV_NOSYNC_THRESHOLD) {
1137 if (diff <= -sync_threshold)
1138 delay = 0;
1139 else if (diff >= sync_threshold)
1140 delay = 2 * delay;
1141 }
1142 }
49410784 1143 is->frame_timer += delay;
eecc17a7
TB
1144#if defined(DEBUG_SYNC)
1145 printf("video: delay=%0.3f actual_delay=%0.3f pts=%0.3f A-V=%f\n",
1146 delay, actual_delay, frame_current_pts, -diff);
1147#endif
1148
d38c9e7a 1149 return is->frame_timer;
49410784
TB
1150}
1151
01310af2
FB
1152/* called to display each frame */
1153static void video_refresh_timer(void *opaque)
1154{
1155 VideoState *is = opaque;
1156 VideoPicture *vp;
638c9d91 1157
72ce053b 1158 SubPicture *sp, *sp2;
01310af2
FB
1159
1160 if (is->video_st) {
d38c9e7a 1161retry:
01310af2 1162 if (is->pictq_size == 0) {
d38c9e7a 1163 //nothing to do, no picture to display in the que
01310af2 1164 } else {
d38c9e7a
MN
1165 double time= av_gettime()/1000000.0;
1166 double next_target;
638c9d91 1167 /* dequeue the picture */
01310af2 1168 vp = &is->pictq[is->pictq_rindex];
638c9d91 1169
d38c9e7a
MN
1170 if(time < vp->target_clock)
1171 return;
638c9d91
FB
1172 /* update current video pts */
1173 is->video_current_pts = vp->pts;
d38c9e7a 1174 is->video_current_pts_drift = is->video_current_pts - time;
a3cc2160 1175 is->video_current_pos = vp->pos;
d38c9e7a
MN
1176 if(is->pictq_size > 1){
1177 VideoPicture *nextvp= &is->pictq[(is->pictq_rindex+1)%VIDEO_PICTURE_QUEUE_SIZE];
1178 assert(nextvp->target_clock >= vp->target_clock);
1179 next_target= nextvp->target_clock;
1180 }else{
1181 next_target= vp->target_clock + is->video_clock - vp->pts; //FIXME pass durations cleanly
1182 }
1183 if(framedrop && time > next_target){
1184 is->skip_frames *= 1.0 + FRAME_SKIP_FACTOR;
1185 if(is->pictq_size > 1 || time > next_target + 0.5){
1186 /* update queue size and signal for next picture */
1187 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1188 is->pictq_rindex = 0;
1189
1190 SDL_LockMutex(is->pictq_mutex);
1191 is->pictq_size--;
1192 SDL_CondSignal(is->pictq_cond);
1193 SDL_UnlockMutex(is->pictq_mutex);
1194 goto retry;
1195 }
1196 }
638c9d91 1197
72ce053b
IC
1198 if(is->subtitle_st) {
1199 if (is->subtitle_stream_changed) {
1200 SDL_LockMutex(is->subpq_mutex);
115329f1 1201
72ce053b
IC
1202 while (is->subpq_size) {
1203 free_subpicture(&is->subpq[is->subpq_rindex]);
115329f1 1204
72ce053b
IC
1205 /* update queue size and signal for next picture */
1206 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1207 is->subpq_rindex = 0;
115329f1 1208
72ce053b
IC
1209 is->subpq_size--;
1210 }
1211 is->subtitle_stream_changed = 0;
1212
1213 SDL_CondSignal(is->subpq_cond);
1214 SDL_UnlockMutex(is->subpq_mutex);
1215 } else {
1216 if (is->subpq_size > 0) {
1217 sp = &is->subpq[is->subpq_rindex];
1218
1219 if (is->subpq_size > 1)
1220 sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
1221 else
1222 sp2 = NULL;
1223
1224 if ((is->video_current_pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
1225 || (sp2 && is->video_current_pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
1226 {
1227 free_subpicture(sp);
1228
1229 /* update queue size and signal for next picture */
1230 if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
1231 is->subpq_rindex = 0;
1232
1233 SDL_LockMutex(is->subpq_mutex);
1234 is->subpq_size--;
1235 SDL_CondSignal(is->subpq_cond);
1236 SDL_UnlockMutex(is->subpq_mutex);
1237 }
1238 }
1239 }
1240 }
1241
01310af2
FB
1242 /* display picture */
1243 video_display(is);
115329f1 1244
01310af2
FB
1245 /* update queue size and signal for next picture */
1246 if (++is->pictq_rindex == VIDEO_PICTURE_QUEUE_SIZE)
1247 is->pictq_rindex = 0;
115329f1 1248
01310af2
FB
1249 SDL_LockMutex(is->pictq_mutex);
1250 is->pictq_size--;
1251 SDL_CondSignal(is->pictq_cond);
1252 SDL_UnlockMutex(is->pictq_mutex);
1253 }
1254 } else if (is->audio_st) {
1255 /* draw the next audio frame */
1256
01310af2
FB
1257 /* if only audio stream, then display the audio bars (better
1258 than nothing, just to test the implementation */
115329f1 1259
01310af2
FB
1260 /* display picture */
1261 video_display(is);
01310af2
FB
1262 }
1263 if (show_status) {
1264 static int64_t last_time;
1265 int64_t cur_time;
72ce053b 1266 int aqsize, vqsize, sqsize;
638c9d91 1267 double av_diff;
115329f1 1268
01310af2 1269 cur_time = av_gettime();
1e1a0b18 1270 if (!last_time || (cur_time - last_time) >= 30000) {
01310af2
FB
1271 aqsize = 0;
1272 vqsize = 0;
72ce053b 1273 sqsize = 0;
01310af2
FB
1274 if (is->audio_st)
1275 aqsize = is->audioq.size;
1276 if (is->video_st)
1277 vqsize = is->videoq.size;
72ce053b
IC
1278 if (is->subtitle_st)
1279 sqsize = is->subtitleq.size;
638c9d91
FB
1280 av_diff = 0;
1281 if (is->audio_st && is->video_st)
1282 av_diff = get_audio_clock(is) - get_video_clock(is);
8a3ceaf4 1283 printf("%7.2f A-V:%7.3f aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
41db429d 1284 get_master_clock(is), av_diff, aqsize / 1024, vqsize / 1024, sqsize, is->faulty_dts, is->faulty_pts);
01310af2
FB
1285 fflush(stdout);
1286 last_time = cur_time;
1287 }
1288 }
1289}
1290
1291/* allocate a picture (needs to do that in main thread to avoid
1292 potential locking problems */
1293static void alloc_picture(void *opaque)
1294{
1295 VideoState *is = opaque;
1296 VideoPicture *vp;
01310af2
FB
1297
1298 vp = &is->pictq[is->pictq_windex];
1299
1300 if (vp->bmp)
1301 SDL_FreeYUVOverlay(vp->bmp);
1302
917d2bb3
MN
1303#if CONFIG_AVFILTER
1304 if (vp->picref)
1305 avfilter_unref_pic(vp->picref);
1306 vp->picref = NULL;
1307
1308 vp->width = is->out_video_filter->inputs[0]->w;
1309 vp->height = is->out_video_filter->inputs[0]->h;
1310 vp->pix_fmt = is->out_video_filter->inputs[0]->format;
1311#else
1312 vp->width = is->video_st->codec->width;
1313 vp->height = is->video_st->codec->height;
1314 vp->pix_fmt = is->video_st->codec->pix_fmt;
1315#endif
1316
1317 vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
115329f1 1318 SDL_YV12_OVERLAY,
61890b02 1319 screen);
01310af2
FB
1320
1321 SDL_LockMutex(is->pictq_mutex);
1322 vp->allocated = 1;
1323 SDL_CondSignal(is->pictq_cond);
1324 SDL_UnlockMutex(is->pictq_mutex);
1325}
1326
267e9dfa
MN
1327/**
1328 *
1329 * @param pts the dts of the pkt / pts of the frame and guessed if not known
1330 */
1a620dd7 1331static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, int64_t pos)
01310af2
FB
1332{
1333 VideoPicture *vp;
1334 int dst_pix_fmt;
917d2bb3
MN
1335#if CONFIG_AVFILTER
1336 AVPicture pict_src;
1337#endif
01310af2
FB
1338 /* wait until we have space to put a new picture */
1339 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1340
1341 if(is->pictq_size>=VIDEO_PICTURE_QUEUE_SIZE && !is->refresh)
1342 is->skip_frames= FFMAX(1.0 - FRAME_SKIP_FACTOR, is->skip_frames * (1.0-FRAME_SKIP_FACTOR));
1343
01310af2
FB
1344 while (is->pictq_size >= VIDEO_PICTURE_QUEUE_SIZE &&
1345 !is->videoq.abort_request) {
1346 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1347 }
1348 SDL_UnlockMutex(is->pictq_mutex);
115329f1 1349
01310af2
FB
1350 if (is->videoq.abort_request)
1351 return -1;
1352
1353 vp = &is->pictq[is->pictq_windex];
1354
1355 /* alloc or resize hardware picture buffer */
115329f1 1356 if (!vp->bmp ||
917d2bb3
MN
1357#if CONFIG_AVFILTER
1358 vp->width != is->out_video_filter->inputs[0]->w ||
1359 vp->height != is->out_video_filter->inputs[0]->h) {
1360#else
01f4895c
MN
1361 vp->width != is->video_st->codec->width ||
1362 vp->height != is->video_st->codec->height) {
917d2bb3 1363#endif
01310af2
FB
1364 SDL_Event event;
1365
1366 vp->allocated = 0;
1367
1368 /* the allocation must be done in the main thread to avoid
1369 locking problems */
1370 event.type = FF_ALLOC_EVENT;
1371 event.user.data1 = is;
1372 SDL_PushEvent(&event);
115329f1 1373
01310af2
FB
1374 /* wait until the picture is allocated */
1375 SDL_LockMutex(is->pictq_mutex);
1376 while (!vp->allocated && !is->videoq.abort_request) {
1377 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1378 }
1379 SDL_UnlockMutex(is->pictq_mutex);
1380
1381 if (is->videoq.abort_request)
1382 return -1;
1383 }
1384
638c9d91 1385 /* if the frame is not skipped, then display it */
01310af2 1386 if (vp->bmp) {
fbf1b885 1387 AVPicture pict;
917d2bb3
MN
1388#if CONFIG_AVFILTER
1389 if(vp->picref)
1390 avfilter_unref_pic(vp->picref);
1391 vp->picref = src_frame->opaque;
1392#endif
fbf1b885 1393
01310af2
FB
1394 /* get a pointer on the bitmap */
1395 SDL_LockYUVOverlay (vp->bmp);
1396
1397 dst_pix_fmt = PIX_FMT_YUV420P;
fbf1b885 1398 memset(&pict,0,sizeof(AVPicture));
01310af2
FB
1399 pict.data[0] = vp->bmp->pixels[0];
1400 pict.data[1] = vp->bmp->pixels[2];
1401 pict.data[2] = vp->bmp->pixels[1];
1402
1403 pict.linesize[0] = vp->bmp->pitches[0];
1404 pict.linesize[1] = vp->bmp->pitches[2];
1405 pict.linesize[2] = vp->bmp->pitches[1];
917d2bb3
MN
1406
1407#if CONFIG_AVFILTER
1408 pict_src.data[0] = src_frame->data[0];
1409 pict_src.data[1] = src_frame->data[1];
1410 pict_src.data[2] = src_frame->data[2];
1411
1412 pict_src.linesize[0] = src_frame->linesize[0];
1413 pict_src.linesize[1] = src_frame->linesize[1];
1414 pict_src.linesize[2] = src_frame->linesize[2];
1415
1416 //FIXME use direct rendering
1417 av_picture_copy(&pict, &pict_src,
1418 vp->pix_fmt, vp->width, vp->height);
1419#else
e43d7a18 1420 sws_flags = av_get_int(sws_opts, "sws_flags", NULL);
3ac56e28 1421 is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
917d2bb3 1422 vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
feb7bc67 1423 dst_pix_fmt, sws_flags, NULL, NULL, NULL);
3ac56e28 1424 if (is->img_convert_ctx == NULL) {
26ba8235
AB
1425 fprintf(stderr, "Cannot initialize the conversion context\n");
1426 exit(1);
1427 }
3ac56e28 1428 sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
917d2bb3
MN
1429 0, vp->height, pict.data, pict.linesize);
1430#endif
01310af2
FB
1431 /* update the bitmap content */
1432 SDL_UnlockYUVOverlay(vp->bmp);
1433
638c9d91 1434 vp->pts = pts;
1a620dd7 1435 vp->pos = pos;
01310af2
FB
1436
1437 /* now we can update the picture count */
1438 if (++is->pictq_windex == VIDEO_PICTURE_QUEUE_SIZE)
1439 is->pictq_windex = 0;
1440 SDL_LockMutex(is->pictq_mutex);
d38c9e7a
MN
1441 vp->target_clock= compute_target_time(vp->pts, is);
1442
01310af2
FB
1443 is->pictq_size++;
1444 SDL_UnlockMutex(is->pictq_mutex);
1445 }
638c9d91
FB
1446 return 0;
1447}
1448
115329f1
DB
1449/**
1450 * compute the exact PTS for the picture if it is omitted in the stream
267e9dfa
MN
1451 * @param pts1 the dts of the pkt / pts of the frame
1452 */
1a620dd7 1453static int output_picture2(VideoState *is, AVFrame *src_frame, double pts1, int64_t pos)
638c9d91
FB
1454{
1455 double frame_delay, pts;
115329f1 1456
638c9d91
FB
1457 pts = pts1;
1458
01310af2 1459 if (pts != 0) {
638c9d91 1460 /* update video clock with pts, if present */
01310af2
FB
1461 is->video_clock = pts;
1462 } else {
72ea344b
FB
1463 pts = is->video_clock;
1464 }
1465 /* update video clock for next frame */
01f4895c 1466 frame_delay = av_q2d(is->video_st->codec->time_base);
72ea344b
FB
1467 /* for MPEG2, the frame can be repeated, so we update the
1468 clock accordingly */
267e9dfa 1469 frame_delay += src_frame->repeat_pict * (frame_delay * 0.5);
72ea344b 1470 is->video_clock += frame_delay;
638c9d91
FB
1471
1472#if defined(DEBUG_SYNC) && 0
ff358eca
SS
1473 printf("frame_type=%c clock=%0.3f pts=%0.3f\n",
1474 av_get_pict_type_char(src_frame->pict_type), pts, pts1);
638c9d91 1475#endif
1a620dd7 1476 return queue_picture(is, src_frame, pts, pos);
01310af2
FB
1477}
1478
3966a574 1479static int get_video_frame(VideoState *is, AVFrame *frame, int64_t *pts, AVPacket *pkt)
01310af2 1480{
6c7d3ead 1481 int len1, got_picture, i;
01310af2 1482
01310af2 1483 if (packet_queue_get(&is->videoq, pkt, 1) < 0)
917d2bb3 1484 return -1;
39c6a118
MN
1485
1486 if(pkt->data == flush_pkt.data){
1487 avcodec_flush_buffers(is->video_st->codec);
6c7d3ead
MN
1488
1489 SDL_LockMutex(is->pictq_mutex);
1490 //Make sure there are no long delay timers (ideally we should just flush the que but thats harder)
1491 for(i=0; i<VIDEO_PICTURE_QUEUE_SIZE; i++){
d38c9e7a 1492 is->pictq[i].target_clock= 0;
6c7d3ead
MN
1493 }
1494 while (is->pictq_size && !is->videoq.abort_request) {
1495 SDL_CondWait(is->pictq_cond, is->pictq_mutex);
1496 }
1a620dd7 1497 is->video_current_pos= -1;
6c7d3ead
MN
1498 SDL_UnlockMutex(is->pictq_mutex);
1499
41db429d
MN
1500 is->last_dts_for_fault_detection=
1501 is->last_pts_for_fault_detection= INT64_MIN;
967030eb 1502 is->frame_last_pts= AV_NOPTS_VALUE;
f7119e42 1503 is->frame_last_delay = 0;
b25453bd 1504 is->frame_timer = (double)av_gettime() / 1000000.0;
d38c9e7a
MN
1505 is->skip_frames= 1;
1506 is->skip_frames_index= 0;
917d2bb3 1507 return 0;
39c6a118
MN
1508 }
1509
638c9d91
FB
1510 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1511 this packet, if any */
7fb262b5 1512 is->video_st->codec->reordered_opaque= pkt->pts;
bea18375 1513 len1 = avcodec_decode_video2(is->video_st->codec,
620e8baf 1514 frame, &got_picture,
bea18375 1515 pkt);
620e8baf 1516
99e0b12b 1517 if (got_picture) {
df7d6e48
SS
1518 if(pkt->dts != AV_NOPTS_VALUE){
1519 is->faulty_dts += pkt->dts <= is->last_dts_for_fault_detection;
1520 is->last_dts_for_fault_detection= pkt->dts;
1521 }
1522 if(frame->reordered_opaque != AV_NOPTS_VALUE){
1523 is->faulty_pts += frame->reordered_opaque <= is->last_pts_for_fault_detection;
1524 is->last_pts_for_fault_detection= frame->reordered_opaque;
1525 }
99e0b12b 1526 }
41db429d
MN
1527
1528 if( ( decoder_reorder_pts==1
ecbed31c 1529 || (decoder_reorder_pts && is->faulty_pts<is->faulty_dts)
41db429d 1530 || pkt->dts == AV_NOPTS_VALUE)
7fb262b5 1531 && frame->reordered_opaque != AV_NOPTS_VALUE)
917d2bb3 1532 *pts= frame->reordered_opaque;
620e8baf 1533 else if(pkt->dts != AV_NOPTS_VALUE)
917d2bb3 1534 *pts= pkt->dts;
620e8baf 1535 else
917d2bb3
MN
1536 *pts= 0;
1537
fb966f99
MN
1538// if (len1 < 0)
1539// break;
d38c9e7a
MN
1540 if (got_picture){
1541 is->skip_frames_index += 1;
1542 if(is->skip_frames_index >= is->skip_frames){
1543 is->skip_frames_index -= FFMAX(is->skip_frames, 1.0);
1544 return 1;
1545 }
1546
1547 }
917d2bb3
MN
1548 return 0;
1549}
1550
1551#if CONFIG_AVFILTER
1552typedef struct {
1553 VideoState *is;
1554 AVFrame *frame;
1555} FilterPriv;
1556
1557static int input_init(AVFilterContext *ctx, const char *args, void *opaque)
1558{
1559 FilterPriv *priv = ctx->priv;
1560 if(!opaque) return -1;
1561
1562 priv->is = opaque;
1563 priv->frame = avcodec_alloc_frame();
1564
1565 return 0;
1566}
1567
1568static void input_uninit(AVFilterContext *ctx)
1569{
1570 FilterPriv *priv = ctx->priv;
1571 av_free(priv->frame);
1572}
1573
1574static int input_request_frame(AVFilterLink *link)
1575{
1576 FilterPriv *priv = link->src->priv;
1577 AVFilterPicRef *picref;
3966a574 1578 int64_t pts = 0;
917d2bb3
MN
1579 AVPacket pkt;
1580 int ret;
1581
1582 while (!(ret = get_video_frame(priv->is, priv->frame, &pts, &pkt)))
1583 av_free_packet(&pkt);
1584 if (ret < 0)
1585 return -1;
1586
1587 /* FIXME: until I figure out how to hook everything up to the codec
1588 * right, we're just copying the entire frame. */
1589 picref = avfilter_get_video_buffer(link, AV_PERM_WRITE, link->w, link->h);
1590 av_picture_copy((AVPicture *)&picref->data, (AVPicture *)priv->frame,
1591 picref->pic->format, link->w, link->h);
1592 av_free_packet(&pkt);
1593
1594 picref->pts = pts;
1595 picref->pixel_aspect = priv->is->video_st->codec->sample_aspect_ratio;
1596 avfilter_start_frame(link, avfilter_ref_pic(picref, ~0));
1597 avfilter_draw_slice(link, 0, link->h, 1);
1598 avfilter_end_frame(link);
1599 avfilter_unref_pic(picref);
1600
1601 return 0;
1602}
1603
1604static int input_query_formats(AVFilterContext *ctx)
1605{
1606 FilterPriv *priv = ctx->priv;
1607 enum PixelFormat pix_fmts[] = {
1608 priv->is->video_st->codec->pix_fmt, PIX_FMT_NONE
1609 };
1610
1611 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1612 return 0;
1613}
1614
1615static int input_config_props(AVFilterLink *link)
1616{
1617 FilterPriv *priv = link->src->priv;
1618 AVCodecContext *c = priv->is->video_st->codec;
1619
1620 link->w = c->width;
1621 link->h = c->height;
1622
1623 return 0;
1624}
1625
1626static AVFilter input_filter =
1627{
1628 .name = "ffplay_input",
1629
1630 .priv_size = sizeof(FilterPriv),
1631
1632 .init = input_init,
1633 .uninit = input_uninit,
1634
1635 .query_formats = input_query_formats,
1636
1637 .inputs = (AVFilterPad[]) {{ .name = NULL }},
1638 .outputs = (AVFilterPad[]) {{ .name = "default",
1639 .type = CODEC_TYPE_VIDEO,
1640 .request_frame = input_request_frame,
1641 .config_props = input_config_props, },
1642 { .name = NULL }},
1643};
1644
1645static void output_end_frame(AVFilterLink *link)
1646{
1647}
1648
1649static int output_query_formats(AVFilterContext *ctx)
1650{
1651 enum PixelFormat pix_fmts[] = { PIX_FMT_YUV420P, PIX_FMT_NONE };
1652
1653 avfilter_set_common_formats(ctx, avfilter_make_format_list(pix_fmts));
1654 return 0;
1655}
1656
1657static int get_filtered_video_frame(AVFilterContext *ctx, AVFrame *frame,
3966a574 1658 int64_t *pts)
917d2bb3
MN
1659{
1660 AVFilterPicRef *pic;
1661
1662 if(avfilter_request_frame(ctx->inputs[0]))
1663 return -1;
1664 if(!(pic = ctx->inputs[0]->cur_pic))
1665 return -1;
1666 ctx->inputs[0]->cur_pic = NULL;
1667
1668 frame->opaque = pic;
1669 *pts = pic->pts;
1670
1671 memcpy(frame->data, pic->data, sizeof(frame->data));
1672 memcpy(frame->linesize, pic->linesize, sizeof(frame->linesize));
1673
1674 return 1;
1675}
1676
1677static AVFilter output_filter =
1678{
1679 .name = "ffplay_output",
1680
1681 .query_formats = output_query_formats,
1682
1683 .inputs = (AVFilterPad[]) {{ .name = "default",
1684 .type = CODEC_TYPE_VIDEO,
1685 .end_frame = output_end_frame,
1686 .min_perms = AV_PERM_READ, },
1687 { .name = NULL }},
1688 .outputs = (AVFilterPad[]) {{ .name = NULL }},
1689};
1690#endif /* CONFIG_AVFILTER */
1691
1692static int video_thread(void *arg)
1693{
1694 VideoState *is = arg;
1695 AVFrame *frame= avcodec_alloc_frame();
3966a574 1696 int64_t pts_int;
917d2bb3
MN
1697 double pts;
1698 int ret;
1699
1700#if CONFIG_AVFILTER
1701 AVFilterContext *filt_src = NULL, *filt_out = NULL;
1702 AVFilterGraph *graph = av_mallocz(sizeof(AVFilterGraph));
1703 graph->scale_sws_opts = av_strdup("sws_flags=bilinear");
1704
1705 if(!(filt_src = avfilter_open(&input_filter, "src"))) goto the_end;
1706 if(!(filt_out = avfilter_open(&output_filter, "out"))) goto the_end;
1707
1708 if(avfilter_init_filter(filt_src, NULL, is)) goto the_end;
1709 if(avfilter_init_filter(filt_out, NULL, frame)) goto the_end;
1710
1711
1712 if(vfilters) {
1713 AVFilterInOut *outputs = av_malloc(sizeof(AVFilterInOut));
1714 AVFilterInOut *inputs = av_malloc(sizeof(AVFilterInOut));
1715
1716 outputs->name = av_strdup("in");
1717 outputs->filter = filt_src;
1718 outputs->pad_idx = 0;
1719 outputs->next = NULL;
1720
1721 inputs->name = av_strdup("out");
1722 inputs->filter = filt_out;
1723 inputs->pad_idx = 0;
1724 inputs->next = NULL;
1725
1726 if (avfilter_graph_parse(graph, vfilters, inputs, outputs, NULL) < 0)
1727 goto the_end;
1728 av_freep(&vfilters);
1729 } else {
1730 if(avfilter_link(filt_src, 0, filt_out, 0) < 0) goto the_end;
1731 }
1732 avfilter_graph_add_filter(graph, filt_src);
1733 avfilter_graph_add_filter(graph, filt_out);
1734
1735 if(avfilter_graph_check_validity(graph, NULL)) goto the_end;
1736 if(avfilter_graph_config_formats(graph, NULL)) goto the_end;
1737 if(avfilter_graph_config_links(graph, NULL)) goto the_end;
1738
1739 is->out_video_filter = filt_out;
1740#endif
1741
1742 for(;;) {
1743#if !CONFIG_AVFILTER
1744 AVPacket pkt;
1745#endif
1746 while (is->paused && !is->videoq.abort_request)
1747 SDL_Delay(10);
1748#if CONFIG_AVFILTER
1749 ret = get_filtered_video_frame(filt_out, frame, &pts_int);
1750#else
1751 ret = get_video_frame(is, frame, &pts_int, &pkt);
1752#endif
1753
1754 if (ret < 0) goto the_end;
1755
1756 if (!ret)
1757 continue;
1758
3966a574 1759 pts = pts_int*av_q2d(is->video_st->time_base);
917d2bb3
MN
1760
1761#if CONFIG_AVFILTER
1762 ret = output_picture2(is, frame, pts, -1); /* fixme: unknown pos */
1763#else
fca62599 1764 ret = output_picture2(is, frame, pts, pkt.pos);
917d2bb3
MN
1765 av_free_packet(&pkt);
1766#endif
1767 if (ret < 0)
1768 goto the_end;
1769
115329f1 1770 if (step)
bba04f1e
WH
1771 if (cur_stream)
1772 stream_pause(cur_stream);
01310af2
FB
1773 }
1774 the_end:
917d2bb3
MN
1775#if CONFIG_AVFILTER
1776 avfilter_graph_destroy(graph);
1777 av_freep(&graph);
1778#endif
c6b1edc9 1779 av_free(frame);
01310af2
FB
1780 return 0;
1781}
1782
72ce053b
IC
1783static int subtitle_thread(void *arg)
1784{
1785 VideoState *is = arg;
1786 SubPicture *sp;
1787 AVPacket pkt1, *pkt = &pkt1;
1788 int len1, got_subtitle;
1789 double pts;
1790 int i, j;
1791 int r, g, b, y, u, v, a;
1792
1793 for(;;) {
1794 while (is->paused && !is->subtitleq.abort_request) {
1795 SDL_Delay(10);
1796 }
1797 if (packet_queue_get(&is->subtitleq, pkt, 1) < 0)
1798 break;
115329f1 1799
39c6a118
MN
1800 if(pkt->data == flush_pkt.data){
1801 avcodec_flush_buffers(is->subtitle_st->codec);
1802 continue;
1803 }
72ce053b
IC
1804 SDL_LockMutex(is->subpq_mutex);
1805 while (is->subpq_size >= SUBPICTURE_QUEUE_SIZE &&
1806 !is->subtitleq.abort_request) {
1807 SDL_CondWait(is->subpq_cond, is->subpq_mutex);
1808 }
1809 SDL_UnlockMutex(is->subpq_mutex);
115329f1 1810
72ce053b
IC
1811 if (is->subtitleq.abort_request)
1812 goto the_end;
115329f1 1813
72ce053b
IC
1814 sp = &is->subpq[is->subpq_windex];
1815
1816 /* NOTE: ipts is the PTS of the _first_ picture beginning in
1817 this packet, if any */
1818 pts = 0;
1819 if (pkt->pts != AV_NOPTS_VALUE)
1820 pts = av_q2d(is->subtitle_st->time_base)*pkt->pts;
1821
bea18375 1822 len1 = avcodec_decode_subtitle2(is->subtitle_st->codec,
115329f1 1823 &sp->sub, &got_subtitle,
bea18375 1824 pkt);
72ce053b
IC
1825// if (len1 < 0)
1826// break;
1827 if (got_subtitle && sp->sub.format == 0) {
1828 sp->pts = pts;
115329f1 1829
72ce053b
IC
1830 for (i = 0; i < sp->sub.num_rects; i++)
1831 {
db4fac64 1832 for (j = 0; j < sp->sub.rects[i]->nb_colors; j++)
72ce053b 1833 {
25b4c651 1834 RGBA_IN(r, g, b, a, (uint32_t*)sp->sub.rects[i]->pict.data[1] + j);
72ce053b
IC
1835 y = RGB_TO_Y_CCIR(r, g, b);
1836 u = RGB_TO_U_CCIR(r, g, b, 0);
1837 v = RGB_TO_V_CCIR(r, g, b, 0);
25b4c651 1838 YUVA_OUT((uint32_t*)sp->sub.rects[i]->pict.data[1] + j, y, u, v, a);
72ce053b
IC
1839 }
1840 }
1841
1842 /* now we can update the picture count */
1843 if (++is->subpq_windex == SUBPICTURE_QUEUE_SIZE)
1844 is->subpq_windex = 0;
1845 SDL_LockMutex(is->subpq_mutex);
1846 is->subpq_size++;
1847 SDL_UnlockMutex(is->subpq_mutex);
1848 }
1849 av_free_packet(pkt);
115329f1 1850// if (step)
72ce053b
IC
1851// if (cur_stream)
1852// stream_pause(cur_stream);
1853 }
1854 the_end:
1855 return 0;
1856}
1857
01310af2
FB
1858 /* copy samples for viewing in the audio visualization window */
1859static void update_sample_display(VideoState *is, short *samples, int samples_size)
1860{
1861 int size, len, channels;
1862
01f4895c 1863 channels = is->audio_st->codec->channels;
01310af2
FB
1864
1865 size = samples_size / sizeof(short);
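/* append to the circular sample_array buffer, wrapping at SAMPLE_ARRAY_SIZE;
 the audio visualization reads from it asynchronously */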
1866 while (size > 0) {
1867 len = SAMPLE_ARRAY_SIZE - is->sample_array_index;
1868 if (len > size)
1869 len = size;
1870 memcpy(is->sample_array + is->sample_array_index, samples, len * sizeof(short));
1871 samples += len;
1872 is->sample_array_index += len;
1873 if (is->sample_array_index >= SAMPLE_ARRAY_SIZE)
1874 is->sample_array_index = 0;
1875 size -= len;
1876 }
1877}
1878
01310af2
FB
1879 /* return the new audio buffer size (samples can be added or removed
1880 to get better sync when video or the external clock is the master) */
115329f1 1881static int synchronize_audio(VideoState *is, short *samples,
638c9d91 1882 int samples_size1, double pts)
01310af2 1883{
638c9d91 1884 int n, samples_size;
01310af2 1885 double ref_clock;
115329f1 1886
01f4895c 1887 n = 2 * is->audio_st->codec->channels;
638c9d91 1888 samples_size = samples_size1;
01310af2 1889
01310af2 1890 /* if not master, then we try to remove or add samples to correct the clock */
01310af2 1891 if (((is->av_sync_type == AV_SYNC_VIDEO_MASTER && is->video_st) ||
638c9d91
FB
1892 is->av_sync_type == AV_SYNC_EXTERNAL_CLOCK)) {
1893 double diff, avg_diff;
01310af2 1894 int wanted_size, min_size, max_size, nb_samples;
115329f1 1895
638c9d91
FB
1896 ref_clock = get_master_clock(is);
1897 diff = get_audio_clock(is) - ref_clock;
115329f1 1898
638c9d91
FB
1899 if (diff < AV_NOSYNC_THRESHOLD) {
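/* audio_diff_cum is an exponentially weighted sum (cum = diff + coef*cum);
 multiplying it by (1 - coef) below yields an average over roughly the
 last AUDIO_DIFF_AVG_NB differences */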
1900 is->audio_diff_cum = diff + is->audio_diff_avg_coef * is->audio_diff_cum;
1901 if (is->audio_diff_avg_count < AUDIO_DIFF_AVG_NB) {
1902 /* not enough measures to have a correct estimate */
1903 is->audio_diff_avg_count++;
1904 } else {
1905 /* estimate the A-V difference */
1906 avg_diff = is->audio_diff_cum * (1.0 - is->audio_diff_avg_coef);
1907
1908 if (fabs(avg_diff) >= is->audio_diff_threshold) {
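/* e.g. diff = +0.05 s at 44100 Hz stereo S16 (n = 4) requests
 0.05*44100*4 = 8820 extra bytes, which is then clamped to within
 SAMPLE_CORRECTION_PERCENT_MAX percent of the original size */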
01f4895c 1909 wanted_size = samples_size + ((int)(diff * is->audio_st->codec->sample_rate) * n);
638c9d91 1910 nb_samples = samples_size / n;
115329f1 1911
638c9d91
FB
1912 min_size = ((nb_samples * (100 - SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1913 max_size = ((nb_samples * (100 + SAMPLE_CORRECTION_PERCENT_MAX)) / 100) * n;
1914 if (wanted_size < min_size)
1915 wanted_size = min_size;
1916 else if (wanted_size > max_size)
1917 wanted_size = max_size;
115329f1 1918
638c9d91
FB
1919 /* add or remove samples to correct the synchronization */
1920 if (wanted_size < samples_size) {
1921 /* remove samples */
1922 samples_size = wanted_size;
1923 } else if (wanted_size > samples_size) {
1924 uint8_t *samples_end, *q;
1925 int nb;
115329f1 1926
638c9d91
FB
1927 /* add samples */
1928 nb = (wanted_size - samples_size); /* bytes to add by repeating the last sample frame */
1929 samples_end = (uint8_t *)samples + samples_size - n;
1930 q = samples_end + n;
1931 while (nb > 0) {
1932 memcpy(q, samples_end, n);
1933 q += n;
1934 nb -= n;
1935 }
1936 samples_size = wanted_size;
1937 }
1938 }
1939#if 0
115329f1
DB
1940 printf("diff=%f adiff=%f sample_diff=%d apts=%0.3f vpts=%0.3f %f\n",
1941 diff, avg_diff, samples_size - samples_size1,
638c9d91
FB
1942 is->audio_clock, is->video_clock, is->audio_diff_threshold);
1943#endif
01310af2 1944 }
638c9d91
FB
1945 } else {
1946 /* difference is too large: probably caused by initial PTS errors, so
1947 reset the A-V filter */
1948 is->audio_diff_avg_count = 0;
1949 is->audio_diff_cum = 0;
01310af2
FB
1950 }
1951 }
1952
01310af2
FB
1953 return samples_size;
1954}
1955
1956 /* decode one audio frame and return its uncompressed size in bytes */
5a4476e2 1957static int audio_decode_frame(VideoState *is, double *pts_ptr)
01310af2 1958{
bea18375 1959 AVPacket *pkt_temp = &is->audio_pkt_temp;
01310af2 1960 AVPacket *pkt = &is->audio_pkt;
abdff646 1961 AVCodecContext *dec= is->audio_st->codec;
72ea344b 1962 int n, len1, data_size;
01310af2
FB
1963 double pts;
1964
1965 for(;;) {
72ea344b 1966 /* NOTE: the audio packet can contain several frames */
bea18375 1967 while (pkt_temp->size > 0) {
5a4476e2 1968 data_size = sizeof(is->audio_buf1);
bea18375 1969 len1 = avcodec_decode_audio3(dec,
5a4476e2 1970 (int16_t *)is->audio_buf1, &data_size,
bea18375 1971 pkt_temp);
72ea344b
FB
1972 if (len1 < 0) {
1973 /* if error, we skip the frame */
bea18375 1974 pkt_temp->size = 0;
01310af2 1975 break;
72ea344b 1976 }
115329f1 1977
bea18375
TB
1978 pkt_temp->data += len1;
1979 pkt_temp->size -= len1;
72ea344b
FB
1980 if (data_size <= 0)
1981 continue;
5a4476e2
PR
1982
1983 if (dec->sample_fmt != is->audio_src_fmt) {
1984 if (is->reformat_ctx)
1985 av_audio_convert_free(is->reformat_ctx);
1986 is->reformat_ctx= av_audio_convert_alloc(SAMPLE_FMT_S16, 1,
1987 dec->sample_fmt, 1, NULL, 0);
1988 if (!is->reformat_ctx) {
1989 fprintf(stderr, "Cannot convert %s sample format to %s sample format\n",
1990 avcodec_get_sample_fmt_name(dec->sample_fmt),
1991 avcodec_get_sample_fmt_name(SAMPLE_FMT_S16));
1992 break;
1993 }
1994 is->audio_src_fmt= dec->sample_fmt;
1995 }
1996
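/* convert the decoded samples to packed signed 16-bit into audio_buf2;
 the strides are in bytes per sample and len counts samples across all channels */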
1997 if (is->reformat_ctx) {
1998 const void *ibuf[6]= {is->audio_buf1};
1999 void *obuf[6]= {is->audio_buf2};
2000 int istride[6]= {av_get_bits_per_sample_format(dec->sample_fmt)/8};
2001 int ostride[6]= {2};
2002 int len= data_size/istride[0];
2003 if (av_audio_convert(is->reformat_ctx, obuf, ostride, ibuf, istride, len)<0) {
2004 printf("av_audio_convert() failed\n");
2005 break;
2006 }
2007 is->audio_buf= is->audio_buf2;
2008 /* FIXME: existing code assumes that data_size equals framesize*channels*2;
2009 remove this legacy cruft */
2010 data_size= len*2;
2011 }else{
2012 is->audio_buf= is->audio_buf1;
2013 }
2014
72ea344b
FB
2015 /* if no pts, then compute it */
2016 pts = is->audio_clock;
2017 *pts_ptr = pts;
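/* n is the number of bytes per sample frame (2 bytes * channels), so
 data_size / (n * sample_rate) is the duration of the data just decoded */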
abdff646 2018 n = 2 * dec->channels;
115329f1 2019 is->audio_clock += (double)data_size /
abdff646 2020 (double)(n * dec->sample_rate);
638c9d91 2021#if defined(DEBUG_SYNC)
72ea344b
FB
2022 {
2023 static double last_clock;
2024 printf("audio: delay=%0.3f clock=%0.3f pts=%0.3f\n",
2025 is->audio_clock - last_clock,
2026 is->audio_clock, pts);
2027 last_clock = is->audio_clock;
01310af2 2028 }
72ea344b
FB
2029#endif
2030 return data_size;
01310af2
FB
2031 }
2032
72ea344b
FB
2033 /* free the current packet */
2034 if (pkt->data)
01310af2 2035 av_free_packet(pkt);
115329f1 2036
72ea344b
FB
2037 if (is->paused || is->audioq.abort_request) {
2038 return -1;
2039 }
115329f1 2040
01310af2
FB
2041 /* read next packet */
2042 if (packet_queue_get(&is->audioq, pkt, 1) < 0)
2043 return -1;
39c6a118 2044 if(pkt->data == flush_pkt.data){
abdff646 2045 avcodec_flush_buffers(dec);
39c6a118
MN
2046 continue;
2047 }
2048
bea18375
TB
2049 pkt_temp->data = pkt->data;
2050 pkt_temp->size = pkt->size;
115329f1 2051
72ea344b
FB
2052 /* update the audio clock with the packet pts, if available */
2053 if (pkt->pts != AV_NOPTS_VALUE) {
c0df9d75 2054 is->audio_clock = av_q2d(is->audio_st->time_base)*pkt->pts;
72ea344b 2055 }
01310af2
FB
2056 }
2057}
2058
638c9d91
FB
2059 /* get the current audio output buffer size, in bytes. With SDL, we
2060 cannot get precise hardware buffer fullness information */
2061static int audio_write_get_buf_size(VideoState *is)
01310af2 2062{
b09b580b 2063 return is->audio_buf_size - is->audio_buf_index;
01310af2
FB
2064}
2065
2066
2067/* prepare a new audio buffer */
358061f6 2068static void sdl_audio_callback(void *opaque, Uint8 *stream, int len)
01310af2
FB
2069{
2070 VideoState *is = opaque;
2071 int audio_size, len1;
2072 double pts;
2073
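/* remember when SDL asked for data; the audio visualization uses this to
 estimate how far playback has advanced since the callback started */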
2074 audio_callback_time = av_gettime();
115329f1 2075
01310af2
FB
2076 while (len > 0) {
2077 if (is->audio_buf_index >= is->audio_buf_size) {
5a4476e2 2078 audio_size = audio_decode_frame(is, &pts);
01310af2
FB
2079 if (audio_size < 0) {
2080 /* if error, just output silence */
1a1078fa 2081 is->audio_buf = is->audio_buf1;
01310af2
FB
2082 is->audio_buf_size = 1024;
2083 memset(is->audio_buf, 0, is->audio_buf_size);
2084 } else {
2085 if (is->show_audio)
2086 update_sample_display(is, (int16_t *)is->audio_buf, audio_size);
115329f1 2087 audio_size = synchronize_audio(is, (int16_t *)is->audio_buf, audio_size,
01310af2
FB
2088 pts);
2089 is->audio_buf_size = audio_size;
2090 }
2091 is->audio_buf_index = 0;
2092 }
2093 len1 = is->audio_buf_size - is->audio_buf_index;
2094 if (len1 > len)
2095 len1 = len;
2096 memcpy(stream, (uint8_t *)is->audio_buf + is->audio_buf_index, len1);
2097 len -= len1;
2098 stream += len1;
2099 is->audio_buf_index += len1;
2100 }
2101}
2102
01310af2
FB
2103/* open a given stream. Return 0 if OK */
2104static int stream_component_open(VideoState *is, int stream_index)
2105{
2106 AVFormatContext *ic = is->ic;
fe74099a 2107 AVCodecContext *avctx;
01310af2
FB
2108 AVCodec *codec;
2109 SDL_AudioSpec wanted_spec, spec;
2110
2111 if (stream_index < 0 || stream_index >= ic->nb_streams)
2112 return -1;
fe74099a 2113 avctx = ic->streams[stream_index]->codec;
115329f1 2114
01310af2 2115 /* limit the number of decoded audio channels before opening the codec */
fe74099a
SS
2116 if (avctx->codec_type == CODEC_TYPE_AUDIO) {
2117 if (avctx->channels > 0) {
2118 avctx->request_channels = FFMIN(2, avctx->channels);
94eadc8b 2119 } else {
fe74099a 2120 avctx->request_channels = 2;
638c9d91 2121 }
01310af2
FB
2122 }
2123
fe74099a
SS
2124 codec = avcodec_find_decoder(avctx->codec_id);
2125 avctx->debug_mv = debug_mv;
2126 avctx->debug = debug;
2127 avctx->workaround_bugs = workaround_bugs;
2128 avctx->lowres = lowres;
2129 if(lowres) avctx->flags |= CODEC_FLAG_EMU_EDGE;
2130 avctx->idct_algo= idct;
2131 if(fast) avctx->flags2 |= CODEC_FLAG2_FAST;
2132 avctx->skip_frame= skip_frame;
2133 avctx->skip_idct= skip_idct;
2134 avctx->skip_loop_filter= skip_loop_filter;
2135 avctx->error_recognition= error_recognition;
2136 avctx->error_concealment= error_concealment;
2137 avcodec_thread_init(avctx, thread_count);
2138
2139 set_context_opts(avctx, avcodec_opts[avctx->codec_type], 0);
e43d7a18 2140
01310af2 2141 if (!codec ||
fe74099a 2142 avcodec_open(avctx, codec) < 0)
01310af2 2143 return -1;
51b73087
JR
2144
2145 /* prepare audio output */
fe74099a
SS
2146 if (avctx->codec_type == CODEC_TYPE_AUDIO) {
2147 wanted_spec.freq = avctx->sample_rate;
51b73087 2148 wanted_spec.format = AUDIO_S16SYS;
fe74099a 2149 wanted_spec.channels = avctx->channels;
51b73087
JR
2150 wanted_spec.silence = 0;
2151 wanted_spec.samples = SDL_AUDIO_BUFFER_SIZE;
2152 wanted_spec.callback = sdl_audio_callback;
2153 wanted_spec.userdata = is;
2154 if (SDL_OpenAudio(&wanted_spec, &spec) < 0) {
2155 fprintf(stderr, "SDL_OpenAudio: %s\n", SDL_GetError());
2156 return -1;
2157 }
2158 is->audio_hw_buf_size = spec.size;
5a4476e2 2159 is->audio_src_fmt= SAMPLE_FMT_S16;
51b73087
JR
2160 }
2161
3f3fe38d 2162 ic->streams[stream_index]->discard = AVDISCARD_DEFAULT;
fe74099a 2163 switch(avctx->codec_type) {
01310af2
FB
2164 case CODEC_TYPE_AUDIO:
2165 is->audio_stream = stream_index;
2166 is->audio_st = ic->streams[stream_index];
2167 is->audio_buf_size = 0;
2168 is->audio_buf_index = 0;
638c9d91
FB
2169
2170 /* init averaging filter */
2171 is->audio_diff_avg_coef = exp(log(0.01) / AUDIO_DIFF_AVG_NB);
2172 is->audio_diff_avg_count = 0;
2173 /* since we do not have precise enough information about the audio fifo fullness,
2174 we only correct audio sync when the error is larger than this threshold */
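/* the threshold below corresponds to roughly two SDL audio buffers worth
 of playback time, in seconds */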
fe74099a 2175 is->audio_diff_threshold = 2.0 * SDL_AUDIO_BUFFER_SIZE / avctx->sample_rate;
638c9d91 2176
01310af2
FB
2177 memset(&is->audio_pkt, 0, sizeof(is->audio_pkt));
2178 packet_queue_init(&is->audioq);
bb270c08 2179 SDL_PauseAudio(0);
01310af2
FB
2180 break;
2181 case CODEC_TYPE_VIDEO:
2182 is->video_stream = stream_index;
2183 is->video_st = ic->streams[stream_index];
2184
68aefbe8 2185// is->video_current_pts_time = av_gettime();
638c9d91 2186
01310af2
FB
2187 packet_queue_init(&is->videoq);
2188 is->video_tid = SDL_CreateThread(video_thread, is);
2189 break;
72ce053b
IC
2190 case CODEC_TYPE_SUBTITLE:
2191 is->subtitle_stream = stream_index;
2192 is->subtitle_st = ic->streams[stream_index];
2193 packet_queue_init(&is->subtitleq);
115329f1 2194
72ce053b
IC
2195 is->subtitle_tid = SDL_CreateThread(subtitle_thread, is);
2196 break;
01310af2
FB
2197 default:
2198 break;
2199 }
2200 return 0;
2201}
2202
2203static void stream_component_close(VideoState *is, int stream_index)
2204{
2205 AVFormatContext *ic = is->ic;
fe74099a 2206 AVCodecContext *avctx;
115329f1 2207
72ce053b
IC
2208 if (stream_index < 0 || stream_index >= ic->nb_streams)
2209 return;
fe74099a 2210 avctx = ic->streams[stream_index]->codec;
01310af2 2211
fe74099a 2212 switch(avctx->codec_type) {
01310af2
FB
2213 case CODEC_TYPE_AUDIO:
2214 packet_queue_abort(&is->audioq);
2215
2216 SDL_CloseAudio();
2217
2218 packet_queue_end(&is->audioq);
5a4476e2
PR
2219 if (is->reformat_ctx)
2220 av_audio_convert_free(is->reformat_ctx);
bc77fce6 2221 is->reformat_ctx = NULL;
01310af2
FB
2222 break;
2223 case CODEC_TYPE_VIDEO:
2224 packet_queue_abort(&is->videoq);
2225
2226 /* note: we also signal this condition to make sure we unblock the
2227 video thread in all cases */
2228 SDL_LockMutex(is->pictq_mutex);
2229 SDL_CondSignal(is->pictq_cond);
2230 SDL_UnlockMutex(is->pictq_mutex);
2231
2232 SDL_WaitThread(is->video_tid, NULL);
2233
2234 packet_queue_end(&is->videoq);
2235 break;
72ce053b
IC
2236 case CODEC_TYPE_SUBTITLE:
2237 packet_queue_abort(&is->subtitleq);
115329f1 2238
72ce053b
IC
2239 /* note: we also signal this condition to make sure we unblock the
2240 subtitle thread in all cases */
2241 SDL_LockMutex(is->subpq_mutex);
2242 is->subtitle_stream_changed = 1;
115329f1 2243
72ce053b
IC
2244 SDL_CondSignal(is->subpq_cond);
2245 SDL_UnlockMutex(is->subpq_mutex);
2246
2247 SDL_WaitThread(is->subtitle_tid, NULL);
2248
2249 packet_queue_end(&is->subtitleq);
2250 break;
01310af2
FB
2251 default:
2252 break;
2253 }
2254
3f3fe38d 2255 ic->streams[stream_index]->discard = AVDISCARD_ALL;
fe74099a
SS
2256 avcodec_close(avctx);
2257 switch(avctx->codec_type) {
01310af2
FB
2258 case CODEC_TYPE_AUDIO:
2259 is->audio_st = NULL;
2260 is->audio_stream = -1;
2261 break;
2262 case CODEC_TYPE_VIDEO:
2263 is->video_st = NULL;
2264 is->video_stream = -1;
2265 break;
72ce053b
IC
2266 case CODEC_TYPE_SUBTITLE:
2267 is->subtitle_st = NULL;
2268 is->subtitle_stream = -1;
2269 break;
01310af2
FB
2270 default:
2271 break;
2272 }
2273}
2274
416e3508
FB
2275/* since we have only one decoding thread, we can use a global
2276 variable instead of a thread local variable */
2277static VideoState *global_video_state;
2278
2279static int decode_interrupt_cb(void)
2280{
2281 return (global_video_state && global_video_state->abort_request);
2282}
01310af2
FB
2283
2284/* this thread gets the stream from the disk or the network */
2285static int decode_thread(void *arg)
2286{
2287 VideoState *is = arg;
2288 AVFormatContext *ic;
6625a3de
MN
2289 int err, i, ret;
2290 int st_index[CODEC_TYPE_NB];
256ab3ed 2291 int st_count[CODEC_TYPE_NB]={0};
9f7490a0 2292 int st_best_packet_count[CODEC_TYPE_NB];
01310af2 2293 AVPacket pkt1, *pkt = &pkt1;
61890b02 2294 AVFormatParameters params, *ap = &params;
75bb7b0a 2295 int eof=0;
01310af2 2296
6299a229
MN
2297 ic = avformat_alloc_context();
2298
6625a3de 2299 memset(st_index, -1, sizeof(st_index));
9f7490a0 2300 memset(st_best_packet_count, -1, sizeof(st_best_packet_count));
01310af2
FB
2301 is->video_stream = -1;
2302 is->audio_stream = -1;
72ce053b 2303 is->subtitle_stream = -1;
01310af2 2304
416e3508
FB
2305 global_video_state = is;
2306 url_set_interrupt_cb(decode_interrupt_cb);
2307
61890b02 2308 memset(ap, 0, sizeof(*ap));
115329f1 2309
6299a229 2310 ap->prealloced_context = 1;
e4b89522
LW
2311 ap->width = frame_width;
2312 ap->height= frame_height;
7e042912 2313 ap->time_base= (AVRational){1, 25};
e4b89522 2314 ap->pix_fmt = frame_pix_fmt;
7e042912 2315
6299a229
MN
2316 set_context_opts(ic, avformat_opts, AV_OPT_FLAG_DECODING_PARAM);
2317
61890b02 2318 err = av_open_input_file(&ic, is->filename, is->iformat, 0, ap);
638c9d91
FB
2319 if (err < 0) {
2320 print_error(is->filename, err);
2321 ret = -1;
2322 goto fail;
2323 }
01310af2 2324 is->ic = ic;
30bc6613
MN
2325
2326 if(genpts)
2327 ic->flags |= AVFMT_FLAG_GENPTS;
2328
24c07998
LA
2329 err = av_find_stream_info(ic);
2330 if (err < 0) {
2331 fprintf(stderr, "%s: could not find codec parameters\n", is->filename);
2332 ret = -1;
2333 goto fail;
2334 }
899681cd
BA
2335 if(ic->pb)
2336 ic->pb->eof_reached= 0; //FIXME hack, ffplay maybe should not use url_feof() to test for the end
72ea344b 2337
70a4764d
MN
2338 if(seek_by_bytes<0)
2339 seek_by_bytes= !!(ic->iformat->flags & AVFMT_TS_DISCONT);
2340
72ea344b
FB
2341 /* if seeking requested, we execute it */
2342 if (start_time != AV_NOPTS_VALUE) {
2343 int64_t timestamp;
2344
2345 timestamp = start_time;
2346 /* add the stream start time */
2347 if (ic->start_time != AV_NOPTS_VALUE)
2348 timestamp += ic->start_time;
4ed29207 2349 ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0);
72ea344b 2350 if (ret < 0) {
115329f1 2351 fprintf(stderr, "%s: could not seek to position %0.3f\n",
72ea344b
FB
2352 is->filename, (double)timestamp / AV_TIME_BASE);
2353 }
2354 }
72ea344b 2355
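/* pick one stream per media type: honour -ast/-vst/-sst when given, otherwise
 prefer the stream that produced the most frames during probing */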
01310af2 2356 for(i = 0; i < ic->nb_streams; i++) {
9f7490a0
MN
2357 AVStream *st= ic->streams[i];
2358 AVCodecContext *avctx = st->codec;
3f3fe38d 2359 ic->streams[i]->discard = AVDISCARD_ALL;
256ab3ed 2360 if(avctx->codec_type >= (unsigned)CODEC_TYPE_NB)
8ef94840 2361 continue;
256ab3ed
MN
2362 if(st_count[avctx->codec_type]++ != wanted_stream[avctx->codec_type] && wanted_stream[avctx->codec_type] >= 0)
2363 continue;
2364
9f7490a0
MN
2365 if(st_best_packet_count[avctx->codec_type] >= st->codec_info_nb_frames)
2366 continue;
2367 st_best_packet_count[avctx->codec_type]= st->codec_info_nb_frames;
2368
fe74099a 2369 switch(avctx->codec_type) {
01310af2 2370 case CODEC_TYPE_AUDIO:
256ab3ed 2371 if (!audio_disable)
6625a3de 2372 st_index[CODEC_TYPE_AUDIO] = i;
01310af2
FB
2373 break;
2374 case CODEC_TYPE_VIDEO:
16a59a7b 2375 case CODEC_TYPE_SUBTITLE:
256ab3ed
MN
2376 if (!video_disable)
2377 st_index[avctx->codec_type] = i;
16a59a7b 2378 break;
01310af2
FB
2379 default:
2380 break;
2381 }
2382 }
2383 if (show_status) {
2384 dump_format(ic, 0, is->filename, 0);
2385 }
2386
2387 /* open the streams */
6625a3de
MN
2388 if (st_index[CODEC_TYPE_AUDIO] >= 0) {
2389 stream_component_open(is, st_index[CODEC_TYPE_AUDIO]);
01310af2
FB
2390 }
2391
077a8d61 2392 ret=-1;
6625a3de
MN
2393 if (st_index[CODEC_TYPE_VIDEO] >= 0) {
2394 ret= stream_component_open(is, st_index[CODEC_TYPE_VIDEO]);
077a8d61 2395 }
d38c9e7a 2396 is->refresh_tid = SDL_CreateThread(refresh_thread, is);
077a8d61 2397 if(ret<0) {
01310af2 2398 if (!display_disable)
bf8ae197 2399 is->show_audio = 2;
01310af2
FB
2400 }
2401
6625a3de
MN
2402 if (st_index[CODEC_TYPE_SUBTITLE] >= 0) {
2403 stream_component_open(is, st_index[CODEC_TYPE_SUBTITLE]);
16a59a7b
BA
2404 }
2405
01310af2 2406 if (is->video_stream < 0 && is->audio_stream < 0) {
638c9d91
FB
2407 fprintf(stderr, "%s: could not open codecs\n", is->filename);
2408 ret = -1;
01310af2
FB
2409 goto fail;
2410 }
2411
2412 for(;;) {
2413 if (is->abort_request)
2414 break;
416e3508
FB
2415 if (is->paused != is->last_paused) {
2416 is->last_paused = is->paused;
72ea344b 2417 if (is->paused)
f5668147 2418 is->read_pause_return= av_read_pause(ic);
72ea344b
FB
2419 else
2420 av_read_play(ic);
416e3508 2421 }
2f642393
AJ
2422#if CONFIG_RTSP_DEMUXER
2423 if (is->paused && !strcmp(ic->iformat->name, "rtsp")) {
416e3508
FB
2424 /* wait 10 ms to avoid trying to get another packet */
2425 /* XXX: horrible */
2426 SDL_Delay(10);
2427 continue;
2428 }
400738b1 2429#endif
72ea344b 2430 if (is->seek_req) {
8e606cc8 2431 int64_t seek_target= is->seek_pos;
4ed29207
MN
2432 int64_t seek_min= is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN;
2433 int64_t seek_max= is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX;
2434//FIXME the +-2 is due to rounding being not done in the correct direction in generation
2435// of the seek_pos/seek_rel variables
8e606cc8 2436
4ed29207 2437 ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags);
72ea344b
FB
2438 if (ret < 0) {
2439 fprintf(stderr, "%s: error while seeking\n", is->ic->filename);
e6c0297f
MN
2440 }else{
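/* the seek succeeded: flush every packet queue and queue flush_pkt so each
 decoder thread calls avcodec_flush_buffers() before decoding post-seek data */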
2441 if (is->audio_stream >= 0) {
2442 packet_queue_flush(&is->audioq);
39c6a118 2443 packet_queue_put(&is->audioq, &flush_pkt);
e6c0297f 2444 }
72ce053b
IC
2445 if (is->subtitle_stream >= 0) {
2446 packet_queue_flush(&is->subtitleq);
39c6a118 2447 packet_queue_put(&is->subtitleq, &flush_pkt);
72ce053b 2448 }
e6c0297f
MN
2449 if (is->video_stream >= 0) {
2450 packet_queue_flush(&is->videoq);
39c6a118 2451 packet_queue_put(&is->videoq, &flush_pkt);
e6c0297f 2452 }
72ea344b
FB
2453 }
2454 is->seek_req = 0;
e45aeb38 2455 eof= 0;
72ea344b 2456 }
416e3508 2457
01310af2 2458 /* if the queues are full, no need to read more */
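/* "full" means more than MAX_QUEUE_SIZE bytes buffered overall, or every
 open stream already holding enough packets on its own */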
79ee4683
MN
2459 if ( is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE
2460 || ( (is->audioq .size > MIN_AUDIOQ_SIZE || is->audio_stream<0)
2461 && (is->videoq .nb_packets > MIN_FRAMES || is->video_stream<0)
2462 && (is->subtitleq.nb_packets > MIN_FRAMES || is->subtitle_stream<0))) {
01310af2
FB
2463 /* wait 10 ms */
2464 SDL_Delay(10);
2465 continue;
2466 }
75bb7b0a 2467 if(url_feof(ic->pb) || eof) {
9dc41767 2468 if(is->video_stream >= 0){
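/* at EOF, queue an empty packet so decoders with frame delay
 (e.g. B-frame reordering) output their remaining pictures */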
26534fe8
MN
2469 av_init_packet(pkt);
2470 pkt->data=NULL;
2471 pkt->size=0;
2472 pkt->stream_index= is->video_stream;
2473 packet_queue_put(&is->videoq, pkt);
9dc41767 2474 }
b4083171 2475 SDL_Delay(10);
2d1653b0
MN
2476 if(autoexit && is->audioq.size + is->videoq.size + is->subtitleq.size ==0){
2477 ret=AVERROR_EOF;
2478 goto fail;
2479 }
600a331c
MN
2480 continue;
2481 }
72ea344b 2482 ret = av_read_frame(ic, pkt);
01310af2 2483 if (ret < 0) {
75bb7b0a
MN
2484 if (ret == AVERROR_EOF)
2485 eof=1;
2486 if (url_ferror(ic->pb))
bb270c08 2487 break;
75bb7b0a
MN
2488 SDL_Delay(100); /* wait for user event */
2489 continue;
01310af2
FB
2490 }
2491 if (pkt->stream_index == is->audio_stream) {
2492 packet_queue_put(&is->audioq, pkt);
2493 } else if (pkt->stream_index == is->video_stream) {
2494 packet_queue_put(&is->videoq, pkt);
72ce053b
IC
2495 } else if (pkt->stream_index == is->subtitle_stream) {
2496 packet_queue_put(&is->subtitleq, pkt);
01310af2
FB
2497 } else {
2498 av_free_packet(pkt);
2499 }
2500 }
2501 /* wait until the end */
2502 while (!is->abort_request) {
2503 SDL_Delay(100);
2504 }
2505
638c9d91 2506 ret = 0;
01310af2 2507 fail:
416e3508
FB
2508 /* disable interrupting */
2509 global_video_state = NULL;
2510
01310af2
FB
2511 /* close each stream */
2512 if (is->audio_stream >= 0)
2513 stream_component_close(is, is->audio_stream);
2514 if (is->video_stream >= 0)
2515 stream_component_close(is, is->video_stream);
72ce053b
IC
2516 if (is->subtitle_stream >= 0)
2517 stream_component_close(is, is->subtitle_stream);
638c9d91
FB
2518 if (is->ic) {
2519 av_close_input_file(is->ic);
2520 is->ic = NULL; /* safety */
2521 }
416e3508
FB
2522 url_set_interrupt_cb(NULL);
2523
638c9d91
FB
2524 if (ret != 0) {
2525 SDL_Event event;
115329f1 2526
638c9d91
FB
2527 event.type = FF_QUIT_EVENT;
2528 event.user.data1 = is;
2529 SDL_PushEvent(&event);
2530 }
01310af2
FB
2531 return 0;
2532}
2533
638c9d91 2534static VideoState *stream_open(const char *filename, AVInputFormat *iformat)
01310af2
FB
2535{
2536 VideoState *is;
2537
2538 is = av_mallocz(sizeof(VideoState));
2539 if (!is)
2540 return NULL;
f7d78f36 2541 av_strlcpy(is->filename, filename, sizeof(is->filename));
638c9d91 2542 is->iformat = iformat;
01310af2
FB
2543 is->ytop = 0;
2544 is->xleft = 0;
2545
2546 /* start video display */
2547 is->pictq_mutex = SDL_CreateMutex();
2548 is->pictq_cond = SDL_CreateCond();
115329f1 2549
72ce053b
IC
2550 is->subpq_mutex = SDL_CreateMutex();
2551 is->subpq_cond = SDL_CreateCond();
115329f1 2552
638c9d91 2553 is->av_sync_type = av_sync_type;
01310af2
FB
2554 is->parse_tid = SDL_CreateThread(decode_thread, is);
2555 if (!is->parse_tid) {
2556 av_free(is);
2557 return NULL;
2558 }
2559 return is;
2560}
2561
2562static void stream_close(VideoState *is)
2563{
2564 VideoPicture *vp;
2565 int i;
2566 /* XXX: use a special url_shutdown call to abort parse cleanly */
2567 is->abort_request = 1;
2568 SDL_WaitThread(is->parse_tid, NULL);
d38c9e7a 2569 SDL_WaitThread(is->refresh_tid, NULL);
01310af2
FB
2570
2571 /* free all pictures */
2572 for(i=0;i<VIDEO_PICTURE_QUEUE_SIZE; i++) {
2573 vp = &is->pictq[i];
917d2bb3
MN
2574#if CONFIG_AVFILTER
2575 if (vp->picref) {
2576 avfilter_unref_pic(vp->picref);
2577 vp->picref = NULL;
2578 }
2579#endif
01310af2
FB
2580 if (vp->bmp) {
2581 SDL_FreeYUVOverlay(vp->bmp);
2582 vp->bmp = NULL;
2583 }
2584 }
2585 SDL_DestroyMutex(is->pictq_mutex);
2586 SDL_DestroyCond(is->pictq_cond);
72ce053b
IC
2587 SDL_DestroyMutex(is->subpq_mutex);
2588 SDL_DestroyCond(is->subpq_cond);
917d2bb3 2589#if !CONFIG_AVFILTER
3ac56e28
MS
2590 if (is->img_convert_ctx)
2591 sws_freeContext(is->img_convert_ctx);
917d2bb3 2592#endif
7c5ab145 2593 av_free(is);
01310af2
FB
2594}
2595
7b49ce2e 2596static void stream_cycle_channel(VideoState *is, int codec_type)
638c9d91
FB
2597{
2598 AVFormatContext *ic = is->ic;
2599 int start_index, stream_index;
2600 AVStream *st;
2601
2602 if (codec_type == CODEC_TYPE_VIDEO)
2603 start_index = is->video_stream;
72ce053b 2604 else if (codec_type == CODEC_TYPE_AUDIO)
638c9d91 2605 start_index = is->audio_stream;
72ce053b
IC
2606 else
2607 start_index = is->subtitle_stream;
2608 if (start_index < (codec_type == CODEC_TYPE_SUBTITLE ? -1 : 0))
638c9d91
FB
2609 return;
2610 stream_index = start_index;
2611 for(;;) {
2612 if (++stream_index >= is->ic->nb_streams)
72ce053b
IC
2613 {
2614 if (codec_type == CODEC_TYPE_SUBTITLE)
2615 {
2616 stream_index = -1;
2617 goto the_end;
2618 } else
2619 stream_index = 0;
2620 }
638c9d91
FB
2621 if (stream_index == start_index)
2622 return;
2623 st = ic->streams[stream_index];
01f4895c 2624 if (st->codec->codec_type == codec_type) {
638c9d91
FB
2625 /* check that parameters are OK */
2626 switch(codec_type) {
2627 case CODEC_TYPE_AUDIO:
01f4895c
MN
2628 if (st->codec->sample_rate != 0 &&
2629 st->codec->channels != 0)
638c9d91
FB
2630 goto the_end;
2631 break;
2632 case CODEC_TYPE_VIDEO:
72ce053b 2633 case CODEC_TYPE_SUBTITLE:
638c9d91
FB
2634 goto the_end;
2635 default:
2636 break;
2637 }
2638 }
2639 }
2640 the_end:
2641 stream_component_close(is, start_index);
2642 stream_component_open(is, stream_index);
2643}
2644
2645
7b49ce2e 2646static void toggle_full_screen(void)
01310af2 2647{
01310af2 2648 is_full_screen = !is_full_screen;
29f3b38a
MR
2649 if (!fs_screen_width) {
2650 /* use default SDL method */
fb84155b 2651// SDL_WM_ToggleFullScreen(screen);
01310af2 2652 }
fb84155b 2653 video_open(cur_stream);
01310af2
FB
2654}
2655
7b49ce2e 2656static void toggle_pause(void)
01310af2
FB
2657{
2658 if (cur_stream)
2659 stream_pause(cur_stream);
bba04f1e
WH
2660 step = 0;
2661}
2662
7b49ce2e 2663static void step_to_next_frame(void)
bba04f1e
WH
2664{
2665 if (cur_stream) {
19cc524a 2666 /* if the stream is paused, unpause it, then step */
bba04f1e 2667 if (cur_stream->paused)
19cc524a 2668 stream_pause(cur_stream);
bba04f1e
WH
2669 }
2670 step = 1;
01310af2
FB
2671}
2672
7b49ce2e 2673static void do_exit(void)
01310af2 2674{
7c5ab145 2675 int i;
01310af2
FB
2676 if (cur_stream) {
2677 stream_close(cur_stream);
2678 cur_stream = NULL;
2679 }
7c5ab145
MS
2680 for (i = 0; i < CODEC_TYPE_NB; i++)
2681 av_free(avcodec_opts[i]);
2682 av_free(avformat_opts);
2683 av_free(sws_opts);
917d2bb3
MN
2684#if CONFIG_AVFILTER
2685 avfilter_uninit();
2686#endif
01310af2
FB
2687 if (show_status)
2688 printf("\n");
2689 SDL_Quit();
2690 exit(0);
2691}
2692
7b49ce2e 2693static void toggle_audio_display(void)
01310af2
FB
2694{
2695 if (cur_stream) {
f5968788 2696 int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00);
12eeda34 2697 cur_stream->show_audio = (cur_stream->show_audio + 1) % 3;
f5968788
MN
2698 fill_rectangle(screen,
2699 cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height,
2700 bgcolor);
2701 SDL_UpdateRect(screen, cur_stream->xleft, cur_stream->ytop, cur_stream->width, cur_stream->height);
01310af2
FB
2702 }
2703}
2704
2705/* handle an event sent by the GUI */
7b49ce2e 2706static void event_loop(void)
01310af2
FB
2707{
2708 SDL_Event event;
a11d11aa 2709 double incr, pos, frac;
01310af2
FB
2710
2711 for(;;) {
d52ec002 2712 double x;
01310af2
FB
2713 SDL_WaitEvent(&event);
2714 switch(event.type) {
2715 case SDL_KEYDOWN:
2716 switch(event.key.keysym.sym) {
2717 case SDLK_ESCAPE:
2718 case SDLK_q:
2719 do_exit();
2720 break;
2721 case SDLK_f:
2722 toggle_full_screen();
2723 break;
2724 case SDLK_p:
2725 case SDLK_SPACE:
2726 toggle_pause();
2727 break;
bba04f1e
WH
2728 case SDLK_s: //S: Step to next frame
2729 step_to_next_frame();
2730 break;
01310af2 2731 case SDLK_a:
115329f1 2732 if (cur_stream)
638c9d91
FB
2733 stream_cycle_channel(cur_stream, CODEC_TYPE_AUDIO);
2734 break;
2735 case SDLK_v:
115329f1 2736 if (cur_stream)
638c9d91
FB
2737 stream_cycle_channel(cur_stream, CODEC_TYPE_VIDEO);
2738 break;
72ce053b 2739 case SDLK_t:
115329f1 2740 if (cur_stream)
72ce053b
IC
2741 stream_cycle_channel(cur_stream, CODEC_TYPE_SUBTITLE);
2742 break;
638c9d91 2743 case SDLK_w:
01310af2
FB
2744 toggle_audio_display();
2745 break;
72ea344b
FB
2746 case SDLK_LEFT:
2747 incr = -10.0;
2748 goto do_seek;
2749 case SDLK_RIGHT:
2750 incr = 10.0;
2751 goto do_seek;
2752 case SDLK_UP:
2753 incr = 60.0;
2754 goto do_seek;
2755 case SDLK_DOWN:
2756 incr = -60.0;
2757 do_seek:
2758 if (cur_stream) {
94b594c6 2759 if (seek_by_bytes) {
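/* byte-based seeking: start from the current byte position and convert the
 +/- seconds increment into bytes using the stream bit rate
 (or ~180 kB/s if the bit rate is unknown) */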
1a620dd7
MN
2760 if (cur_stream->video_stream >= 0 && cur_stream->video_current_pos>=0){
2761 pos= cur_stream->video_current_pos;
2762 }else if(cur_stream->audio_stream >= 0 && cur_stream->audio_pkt.pos>=0){
2763 pos= cur_stream->audio_pkt.pos;
2764 }else
2765 pos = url_ftell(cur_stream->ic->pb);
94b594c6 2766 if (cur_stream->ic->bit_rate)
566cd2cb 2767 incr *= cur_stream->ic->bit_rate / 8.0;
94b594c6
SH
2768 else
2769 incr *= 180000.0;
2770 pos += incr;
2ef46053 2771 stream_seek(cur_stream, pos, incr, 1);
94b594c6
SH
2772 } else {
2773 pos = get_master_clock(cur_stream);
2774 pos += incr;
2ef46053 2775 stream_seek(cur_stream, (int64_t)(pos * AV_TIME_BASE), (int64_t)(incr * AV_TIME_BASE), 0);
94b594c6 2776 }
72ea344b
FB
2777 }
2778 break;
01310af2
FB
2779 default:
2780 break;
2781 }
2782 break;
a11d11aa 2783 case SDL_MOUSEBUTTONDOWN:
d52ec002
MN
2784 case SDL_MOUSEMOTION:
2785 if(event.type ==SDL_MOUSEBUTTONDOWN){
2786 x= event.button.x;
2787 }else{
2788 if(event.motion.state != SDL_PRESSED)
2789 break;
2790 x= event.motion.x;
2791 }
bb270c08 2792 if (cur_stream) {
2ef46053
MN
2793 if(seek_by_bytes || cur_stream->ic->duration<=0){
2794 uint64_t size= url_fsize(cur_stream->ic->pb);
d52ec002 2795 stream_seek(cur_stream, size*x/cur_stream->width, 0, 1);
2ef46053 2796 }else{
6371c81a
MN
2797 int64_t ts;
2798 int ns, hh, mm, ss;
2799 int tns, thh, tmm, tss;
2800 tns = cur_stream->ic->duration/1000000LL;
2801 thh = tns/3600;
2802 tmm = (tns%3600)/60;
2803 tss = (tns%60);
d52ec002 2804 frac = x/cur_stream->width;
6371c81a
MN
2805 ns = frac*tns;
2806 hh = ns/3600;
2807 mm = (ns%3600)/60;
2808 ss = (ns%60);
2809 fprintf(stderr, "Seek to %2.0f%% (%2d:%02d:%02d) of total duration (%2d:%02d:%02d) \n", frac*100,
2810 hh, mm, ss, thh, tmm, tss);
2811 ts = frac*cur_stream->ic->duration;
2812 if (cur_stream->ic->start_time != AV_NOPTS_VALUE)
2813 ts += cur_stream->ic->start_time;
2814 stream_seek(cur_stream, ts, 0, 0);
2ef46053 2815 }
bb270c08
DB
2816 }
2817 break;
01310af2
FB
2818 case SDL_VIDEORESIZE:
2819 if (cur_stream) {
115329f1 2820 screen = SDL_SetVideoMode(event.resize.w, event.resize.h, 0,
01310af2 2821 SDL_HWSURFACE|SDL_RESIZABLE|SDL_ASYNCBLIT|SDL_HWACCEL);
c57d3469
MN
2822 screen_width = cur_stream->width = event.resize.w;
2823 screen_height= cur_stream->height= event.resize.h;
01310af2
FB
2824 }
2825 break;
2826 case SDL_QUIT:
638c9d91 2827 case FF_QUIT_EVENT:
01310af2
FB
2828 do_exit();
2829 break;
2830 case FF_ALLOC_EVENT:
fccb19e3 2831 video_open(event.user.data1);
01310af2
FB
2832 alloc_picture(event.user.data1);
2833 break;
2834 case FF_REFRESH_EVENT:
2835 video_refresh_timer(event.user.data1);
d38c9e7a 2836 cur_stream->refresh=0;
01310af2
FB
2837 break;
2838 default:
2839 break;
2840 }
2841 }
2842}
2843
e4b89522
LW
2844static void opt_frame_size(const char *arg)
2845{
b33ece16 2846 if (av_parse_video_frame_size(&frame_width, &frame_height, arg) < 0) {
e4b89522
LW
2847 fprintf(stderr, "Incorrect frame size\n");
2848 exit(1);
2849 }
2850 if ((frame_width % 2) != 0 || (frame_height % 2) != 0) {
2851 fprintf(stderr, "Frame size must be a multiple of 2\n");
2852 exit(1);
2853 }
2854}
2855
a5b3b5f6 2856static int opt_width(const char *opt, const char *arg)
01310af2 2857{
a5b3b5f6
SS
2858 screen_width = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2859 return 0;
01310af2
FB
2860}
2861
a5b3b5f6 2862static int opt_height(const char *opt, const char *arg)
01310af2 2863{
a5b3b5f6
SS
2864 screen_height = parse_number_or_die(opt, arg, OPT_INT64, 1, INT_MAX);
2865 return 0;
01310af2
FB
2866}
2867
2868static void opt_format(const char *arg)
2869{
2870 file_iformat = av_find_input_format(arg);
2871 if (!file_iformat) {
2872 fprintf(stderr, "Unknown input format: %s\n", arg);
2873 exit(1);
2874 }
2875}
61890b02 2876
e4b89522
LW
2877static void opt_frame_pix_fmt(const char *arg)
2878{
718c7b18 2879 frame_pix_fmt = av_get_pix_fmt(arg);
e4b89522
LW
2880}
2881
b81d6235 2882static int opt_sync(const char *opt, const char *arg)
638c9d91
FB
2883{
2884 if (!strcmp(arg, "audio"))
2885 av_sync_type = AV_SYNC_AUDIO_MASTER;
2886 else if (!strcmp(arg, "video"))
2887 av_sync_type = AV_SYNC_VIDEO_MASTER;
2888 else if (!strcmp(arg, "ext"))
2889 av_sync_type = AV_SYNC_EXTERNAL_CLOCK;
aab1b7e5 2890 else {
b81d6235 2891 fprintf(stderr, "Unknown value for %s: %s\n", opt, arg);
aab1b7e5
SS
2892 exit(1);
2893 }
b81d6235 2894 return 0;
638c9d91
FB
2895}
2896
e11bc2c6 2897static int opt_seek(const char *opt, const char *arg)
72ea344b 2898{
e11bc2c6
SS
2899 start_time = parse_time_or_die(opt, arg, 1);
2900 return 0;
72ea344b
FB
2901}
2902
a5b3b5f6 2903static int opt_debug(const char *opt, const char *arg)
e26a8335 2904{
a309073b 2905 av_log_set_level(99);
a5b3b5f6
SS
2906 debug = parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
2907 return 0;
e26a8335 2908}
115329f1 2909
a5b3b5f6 2910static int opt_vismv(const char *opt, const char *arg)
0c9bbaec 2911{
a5b3b5f6
SS
2912 debug_mv = parse_number_or_die(opt, arg, OPT_INT64, INT_MIN, INT_MAX);
2913 return 0;
0c9bbaec 2914}
c62c07d3 2915
a5b3b5f6 2916static int opt_thread_count(const char *opt, const char *arg)
c62c07d3 2917{
a5b3b5f6 2918 thread_count= parse_number_or_die(opt, arg, OPT_INT64, 0, INT_MAX);
b250f9c6 2919#if !HAVE_THREADS
c62c07d3
MN
2920 fprintf(stderr, "Warning: not compiled with thread support, using thread emulation\n");
2921#endif
a5b3b5f6 2922 return 0;
c62c07d3 2923}
115329f1 2924
358061f6 2925static const OptionDef options[] = {
992f8eae 2926#include "cmdutils_common_opts.h"
a5b3b5f6
SS
2927 { "x", HAS_ARG | OPT_FUNC2, {(void*)opt_width}, "force displayed width", "width" },
2928 { "y", HAS_ARG | OPT_FUNC2, {(void*)opt_height}, "force displayed height", "height" },
e4b89522 2929 { "s", HAS_ARG | OPT_VIDEO, {(void*)opt_frame_size}, "set frame size (WxH or abbreviation)", "size" },
638c9d91 2930 { "fs", OPT_BOOL, {(void*)&is_full_screen}, "force full screen" },
01310af2
FB
2931 { "an", OPT_BOOL, {(void*)&audio_disable}, "disable audio" },
2932 { "vn", OPT_BOOL, {(void*)&video_disable}, "disable video" },
5b369983
MN
2933 { "ast", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[CODEC_TYPE_AUDIO]}, "select desired audio stream", "stream_number" },
2934 { "vst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[CODEC_TYPE_VIDEO]}, "select desired video stream", "stream_number" },
2935 { "sst", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&wanted_stream[CODEC_TYPE_SUBTITLE]}, "select desired subtitle stream", "stream_number" },
e11bc2c6 2936 { "ss", HAS_ARG | OPT_FUNC2, {(void*)&opt_seek}, "seek to a given position in seconds", "pos" },
674fe163 2937 { "bytes", OPT_INT | HAS_ARG, {(void*)&seek_by_bytes}, "seek by bytes 0=off 1=on -1=auto", "val" },
01310af2
FB
2938 { "nodisp", OPT_BOOL, {(void*)&display_disable}, "disable graphical display" },
2939 { "f", HAS_ARG, {(void*)opt_format}, "force format", "fmt" },
e4b89522 2940 { "pix_fmt", HAS_ARG | OPT_EXPERT | OPT_VIDEO, {(void*)opt_frame_pix_fmt}, "set pixel format", "format" },
98ae6acf 2941 { "stats", OPT_BOOL | OPT_EXPERT, {(void*)&show_status}, "show status", "" },
a5b3b5f6 2942 { "debug", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_debug}, "print specific debug info", "" },
6387c3e6 2943 { "bug", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&workaround_bugs}, "workaround bugs", "" },
a5b3b5f6 2944 { "vismv", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_vismv}, "visualize motion vectors", "" },
6fc5b059 2945 { "fast", OPT_BOOL | OPT_EXPERT, {(void*)&fast}, "non spec compliant optimizations", "" },
30bc6613 2946 { "genpts", OPT_BOOL | OPT_EXPERT, {(void*)&genpts}, "generate pts", "" },
59055363 2947 { "drp", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&decoder_reorder_pts}, "let decoder reorder pts 0=off 1=on -1=auto", ""},
178fcca8 2948 { "lowres", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&lowres}, "", "" },
8c3eba7c
MN
2949 { "skiploop", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_loop_filter}, "", "" },
2950 { "skipframe", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_frame}, "", "" },
2951 { "skipidct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&skip_idct}, "", "" },
178fcca8 2952 { "idct", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&idct}, "set idct algo", "algo" },
047599a4 2953 { "er", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_recognition}, "set error detection threshold (0-4)", "threshold" },
1b51e051 2954 { "ec", OPT_INT | HAS_ARG | OPT_EXPERT, {(void*)&error_concealment}, "set error concealment options", "bit_mask" },
b81d6235 2955 { "sync", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_sync}, "set audio-video sync. type (type=audio/video/ext)", "type" },
a5b3b5f6 2956 { "threads", HAS_ARG | OPT_FUNC2 | OPT_EXPERT, {(void*)opt_thread_count}, "thread count", "count" },
2d1653b0 2957 { "autoexit", OPT_BOOL | OPT_EXPERT, {(void*)&autoexit}, "exit at the end", "" },
d38c9e7a 2958 { "framedrop", OPT_BOOL | OPT_EXPERT, {(void*)&framedrop}, "drop frames when cpu is too slow", "" },
917d2bb3
MN
2959#if CONFIG_AVFILTER
2960 { "vfilters", OPT_STRING | HAS_ARG, {(void*)&vfilters}, "video filters", "filter list" },
2961#endif
e43d7a18 2962 { "default", OPT_FUNC2 | HAS_ARG | OPT_AUDIO | OPT_VIDEO | OPT_EXPERT, {(void*)opt_default}, "generic catch all option", "" },
01310af2
FB
2963 { NULL, },
2964};
2965
0c2a18cb 2966static void show_usage(void)
01310af2 2967{
27daa420
RP
2968 printf("Simple media player\n");
2969 printf("usage: ffplay [options] input_file\n");
01310af2 2970 printf("\n");
0c2a18cb
RP
2971}
2972
2973static void show_help(void)
2974{
2975 show_usage();
02d504a7
FB
2976 show_help_options(options, "Main options:\n",
2977 OPT_EXPERT, 0);
2978 show_help_options(options, "\nAdvanced options:\n",
2979 OPT_EXPERT, OPT_EXPERT);
01310af2
FB
2980 printf("\nWhile playing:\n"
2981 "q, ESC quit\n"
2982 "f toggle full screen\n"
2983 "p, SPC pause\n"
638c9d91
FB
2984 "a cycle audio channel\n"
2985 "v cycle video channel\n"
72ce053b 2986 "t cycle subtitle channel\n"
638c9d91 2987 "w show audio waves\n"
72ea344b
FB
2988 "left/right seek backward/forward 10 seconds\n"
2989 "down/up seek backward/forward 1 minute\n"
a11d11aa 2990 "mouse click seek to percentage in file corresponding to fraction of width\n"
01310af2 2991 );
01310af2
FB
2992}
2993
358061f6 2994static void opt_input_file(const char *filename)
01310af2 2995{
07a70b38
SS
2996 if (input_filename) {
2997 fprintf(stderr, "Argument '%s' provided as input filename, but '%s' was already specified.\n",
2998 filename, input_filename);
2999 exit(1);
3000 }
e8d83e1c 3001 if (!strcmp(filename, "-"))
9fcfc0b7 3002 filename = "pipe:";
01310af2
FB
3003 input_filename = filename;
3004}
3005
3006/* Called from the main */
3007int main(int argc, char **argv)
3008{
e43d7a18 3009 int flags, i;
115329f1 3010
01310af2 3011 /* register all codecs, demuxers and protocols */
c721d803
LA
3012 avcodec_register_all();
3013 avdevice_register_all();
917d2bb3
MN
3014#if CONFIG_AVFILTER
3015 avfilter_register_all();
3016#endif
01310af2
FB
3017 av_register_all();
3018
e43d7a18 3019 for(i=0; i<CODEC_TYPE_NB; i++){
636f1c4c 3020 avcodec_opts[i]= avcodec_alloc_context2(i);
e43d7a18 3021 }
8e2fd8e1 3022 avformat_opts = avformat_alloc_context();
917d2bb3 3023#if !CONFIG_AVFILTER
e43d7a18 3024 sws_opts = sws_getContext(16,16,0, 16,16,0, sws_flags, NULL,NULL,NULL);
917d2bb3 3025#endif
e43d7a18 3026
ea9c581f 3027 show_banner();
4cfac5bc 3028
f5da5c93 3029 parse_options(argc, argv, options, opt_input_file);
01310af2 3030
aab1b7e5 3031 if (!input_filename) {
7f11e745 3032 show_usage();
7a7da6b4 3033 fprintf(stderr, "An input file must be specified\n");
7f11e745 3034 fprintf(stderr, "Use -h to get full help or, even better, run 'man ffplay'\n");
aab1b7e5
SS
3035 exit(1);
3036 }
01310af2
FB
3037
3038 if (display_disable) {
3039 video_disable = 1;
3040 }
31319a8c 3041 flags = SDL_INIT_VIDEO | SDL_INIT_AUDIO | SDL_INIT_TIMER;
c97f5402
DB
3042#if !defined(__MINGW32__) && !defined(__APPLE__)
3043 flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */
31319a8c 3044#endif
01310af2 3045 if (SDL_Init (flags)) {
05ab0b76 3046 fprintf(stderr, "Could not initialize SDL - %s\n", SDL_GetError());
01310af2
FB
3047 exit(1);
3048 }
3049
3050 if (!display_disable) {
b250f9c6 3051#if HAVE_SDL_VIDEO_SIZE
3ef17d62
MR
3052 const SDL_VideoInfo *vi = SDL_GetVideoInfo();
3053 fs_screen_width = vi->current_w;
3054 fs_screen_height = vi->current_h;
29f3b38a 3055#endif
01310af2
FB
3056 }
3057
3058 SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE);
01310af2
FB
3059 SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE);
3060 SDL_EventState(SDL_USEREVENT, SDL_IGNORE);
3061
39c6a118
MN
3062 av_init_packet(&flush_pkt);
3063 flush_pkt.data= "FLUSH";
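/* flush_pkt is a sentinel: the decoder threads compare pkt->data against
 flush_pkt.data and call avcodec_flush_buffers() when they see it */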
3064
638c9d91 3065 cur_stream = stream_open(input_filename, file_iformat);
01310af2
FB
3066
3067 event_loop();
3068
3069 /* never returns */
3070
3071 return 0;
3072}